hermann 0.26.0.0 → 0.26.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 1ff1d84d0e2dcf323ab815f629e08d5459d7a462
4
- data.tar.gz: df848b706e5e7ae27453a5ea184b0c02d9a07469
3
+ metadata.gz: 7a8c3c2350a07ffdafa0a2aaba48525f6ed12322
4
+ data.tar.gz: 9b6211fb45622bbbe676511014f76b3da1b74c8c
5
5
  SHA512:
6
- metadata.gz: 5049f62844ddaf48024efe4f7084a3b5cf0efb192ddf14a47ef1c9d5a3771d083ecdbcbf1f456f6d761fc7ebe9826003ab3d4ec4613238fe66b17b98c61ec8ec
7
- data.tar.gz: f5b2e3591b4c5a3458a664897643a67c08880ff2f528b40bd94ef68a85a10644cff7fd4a7c37cdd295c338369d0fd5a7e57c4a57091a264285e13fc31a8bce24
6
+ metadata.gz: 43fb663d9a068d3e425c08fcdb31cb17feaaa70a89b1ee46a9a48915eb76d7a6f025d2a0351c166b1f190bbe3fe7c6956bd61eeae0feb3928abd564985c717a5
7
+ data.tar.gz: a349bedefd031ce601e224755dc4d2e528c45e5bd43d3fc5b2ce9b74aae399fed3a77321402f6beca3bc8cf741d3de0a2186491e7dc4df683d9cef1ac52f207a
@@ -33,6 +33,11 @@
33
33
 
34
34
  #include "hermann_rdkafka.h"
35
35
 
36
+ /* This header file exposes the functions in librdkafka.a that are needed for
37
+ * consistent partitioning. After librdkafka releases a new tag and Hermann
38
+ * points to it, this can be removed. */
39
+ #include "rdcrc32.h"
40
+
36
41
  #ifdef HAVE_RUBY_VERSION_H
37
42
  #include <ruby/version.h>
38
43
  #endif
@@ -134,6 +139,18 @@ static void msg_delivered(rd_kafka_t *rk,
134
139
  }
135
140
  }
136
141
 
142
+
143
+ /* This function is in rdkafka.h on librdkafka master. As soon as a new
144
+ * version is released and Hermann points to it, this can be removed. */
145
+ int32_t rd_kafka_msg_partitioner_consistent (const rd_kafka_topic_t *rkt,
146
+ const void *key, size_t keylen,
147
+ int32_t partition_cnt,
148
+ void *rkt_opaque,
149
+ void *msg_opaque) {
150
+ return rd_crc32(key, keylen) % partition_cnt;
151
+ }
152
+
153
+
137
154
  /**
138
155
  * Producer partitioner callback.
139
156
  * Used to determine the target partition within a topic for production.
@@ -154,17 +171,11 @@ static int32_t producer_partitioner_callback(const rd_kafka_topic_t *rkt,
154
171
  int32_t partition_cnt,
155
172
  void *rkt_opaque,
156
173
  void *msg_opaque) {
157
- /* Pick a random partition */
158
- int retry = 0;
159
- int32_t partition = RD_KAFKA_PARTITION_UA;
160
-
161
- for (; retry < partition_cnt; retry++) {
162
- partition = rand() % partition_cnt;
163
- if (rd_kafka_topic_partition_available(rkt, partition)) {
164
- break; /* this one will do */
165
- }
174
+ if (keylen) {
175
+ return rd_kafka_msg_partitioner_consistent(rkt, keydata, keylen, partition_cnt, rkt_opaque, msg_opaque);
176
+ } else {
177
+ return rd_kafka_msg_partitioner_random(rkt, keydata, keylen, partition_cnt, rkt_opaque, msg_opaque);
166
178
  }
167
- return partition;
168
179
  }
169
180
 
170
181
  /**
@@ -589,10 +600,11 @@ void producer_init_kafka(VALUE self, HermannInstanceConfig* config) {
589
600
  * @param message VALUE the ruby String containing the outgoing message.
590
601
  * @param topic VALUE the ruby String containing the topic to use for the
591
602
  * outgoing message.
603
+ * @param key VALUE the ruby String containing the key to partition by
592
604
  * @param result VALUE the Hermann::Result object to be fulfilled when the
593
605
  * push completes
594
606
  */
595
- static VALUE producer_push_single(VALUE self, VALUE message, VALUE topic, VALUE result) {
607
+ static VALUE producer_push_single(VALUE self, VALUE message, VALUE topic, VALUE partition_key, VALUE result) {
596
608
 
597
609
  HermannInstanceConfig* producerConfig;
598
610
  /* Context pointer, pointing to `result`, for the librdkafka delivery
@@ -600,6 +612,7 @@ static VALUE producer_push_single(VALUE self, VALUE message, VALUE topic, VALUE
600
612
  */
601
613
  hermann_push_ctx_t *delivery_ctx = (hermann_push_ctx_t *)malloc(sizeof(hermann_push_ctx_t));
602
614
  rd_kafka_topic_t *rkt = NULL;
615
+ rd_kafka_topic_conf_t *rkt_conf = NULL;
603
616
 
604
617
  TRACER("self: %p, message: %p, result: %p)\n", self, message, result);
605
618
 
@@ -622,9 +635,15 @@ static VALUE producer_push_single(VALUE self, VALUE message, VALUE topic, VALUE
622
635
 
623
636
  TRACER("kafka initialized\n");
624
637
 
638
+ /* Topic configuration */
639
+ rkt_conf = rd_kafka_topic_conf_new();
640
+
641
+ /* Set the partitioner callback */
642
+ rd_kafka_topic_conf_set_partitioner_cb(rkt_conf, producer_partitioner_callback);
643
+
625
644
  rkt = rd_kafka_topic_new(producerConfig->rk,
626
645
  RSTRING_PTR(topic),
627
- NULL);
646
+ rkt_conf);
628
647
 
629
648
  if (NULL == rkt) {
630
649
  rb_raise(rb_eRuntimeError, "Could not construct a topic structure");
@@ -645,8 +664,8 @@ static VALUE producer_push_single(VALUE self, VALUE message, VALUE topic, VALUE
645
664
  RD_KAFKA_MSG_F_COPY,
646
665
  RSTRING_PTR(message),
647
666
  RSTRING_LEN(message),
648
- NULL,
649
- 0,
667
+ RSTRING_PTR(partition_key),
668
+ RSTRING_LEN(partition_key),
650
669
  delivery_ctx)) {
651
670
  fprintf(stderr, "%% Failed to produce to topic %s partition %i: %s\n",
652
671
  rd_kafka_topic_name(producerConfig->rkt), producerConfig->partition,
@@ -1240,7 +1259,7 @@ void Init_hermann_rdkafka() {
1240
1259
  rb_define_method(c_producer, "initialize_copy", producer_init_copy, 1);
1241
1260
 
1242
1261
  /* Producer.push_single(msg) */
1243
- rb_define_method(c_producer, "push_single", producer_push_single, 3);
1262
+ rb_define_method(c_producer, "push_single", producer_push_single, 4);
1244
1263
 
1245
1264
  /* Producer.tick */
1246
1265
  rb_define_method(c_producer, "tick", producer_tick, 1);
@@ -0,0 +1,103 @@
1
+ /**
2
+ * \file rdcrc32.h
3
+ * Functions and types for CRC checks.
4
+ *
5
+ * Generated on Tue May 8 17:36:59 2012,
6
+ * by pycrc v0.7.10, http://www.tty1.net/pycrc/
7
+ *
8
+ * NOTE: Contains librd modifications:
9
+ * - rd_crc32() helper.
10
+ * - __RDCRC32___H__ define (was missing the '32' part).
11
+ *
12
+ * using the configuration:
13
+ * Width = 32
14
+ * Poly = 0x04c11db7
15
+ * XorIn = 0xffffffff
16
+ * ReflectIn = True
17
+ * XorOut = 0xffffffff
18
+ * ReflectOut = True
19
+ * Algorithm = table-driven
20
+ *****************************************************************************/
21
+ #ifndef __RDCRC32___H__
22
+ #define __RDCRC32___H__
23
+
24
+ #include <stdlib.h>
25
+ #include <stdint.h>
26
+
27
+ #ifdef __cplusplus
28
+ extern "C" {
29
+ #endif
30
+
31
+
32
+ /**
33
+ * The definition of the used algorithm.
34
+ *****************************************************************************/
35
+ #define CRC_ALGO_TABLE_DRIVEN 1
36
+
37
+
38
+ /**
39
+ * The type of the CRC values.
40
+ *
41
+ * This type must be big enough to contain at least 32 bits.
42
+ *****************************************************************************/
43
+ typedef uint32_t rd_crc32_t;
44
+
45
+
46
+ /**
47
+ * Reflect all bits of a \a data word of \a data_len bytes.
48
+ *
49
+ * \param data The data word to be reflected.
50
+ * \param data_len The width of \a data expressed in number of bits.
51
+ * \return The reflected data.
52
+ *****************************************************************************/
53
+ rd_crc32_t rd_crc32_reflect(rd_crc32_t data, size_t data_len);
54
+
55
+
56
+ /**
57
+ * Calculate the initial crc value.
58
+ *
59
+ * \return The initial crc value.
60
+ *****************************************************************************/
61
+ static inline rd_crc32_t rd_crc32_init(void)
62
+ {
63
+ return 0xffffffff;
64
+ }
65
+
66
+
67
+ /**
68
+ * Update the crc value with new data.
69
+ *
70
+ * \param crc The current crc value.
71
+ * \param data Pointer to a buffer of \a data_len bytes.
72
+ * \param data_len Number of bytes in the \a data buffer.
73
+ * \return The updated crc value.
74
+ *****************************************************************************/
75
+ rd_crc32_t rd_crc32_update(rd_crc32_t crc, const unsigned char *data, size_t data_len);
76
+
77
+
78
+ /**
79
+ * Calculate the final crc value.
80
+ *
81
+ * \param crc The current crc value.
82
+ * \return The final crc value.
83
+ *****************************************************************************/
84
+ static inline rd_crc32_t rd_crc32_finalize(rd_crc32_t crc)
85
+ {
86
+ return crc ^ 0xffffffff;
87
+ }
88
+
89
+
90
+ /**
91
+ * Wrapper for performing CRC32 on the provided buffer.
92
+ */
93
+ static inline rd_crc32_t rd_crc32 (const char *data, size_t data_len) {
94
+ return rd_crc32_finalize(rd_crc32_update(rd_crc32_init(),
95
+ (const unsigned char *)data,
96
+ data_len));
97
+ }
98
+
99
+ #ifdef __cplusplus
100
+ } /* closing brace for extern "C" */
101
+ #endif
102
+
103
+ #endif /* __RDCRC32___H__ */
@@ -63,8 +63,7 @@ module Hermann
63
63
  end
64
64
 
65
65
  if Hermann.jruby?
66
- key = opts.has_key?(:partition_key) ? opts[:partition_key].to_java : nil
67
- result = @internal.push_single(value, topic, key)
66
+ result = @internal.push_single(value, topic, opts[:partition_key], nil)
68
67
  unless result.nil?
69
68
  @children << result
70
69
  end
@@ -76,7 +75,7 @@ module Hermann
76
75
  # librdkafka callback queue overflow
77
76
  tick_reactor
78
77
  result = create_result
79
- @internal.push_single(value, topic, result)
78
+ @internal.push_single(value, topic, opts[:partition_key].to_s, result)
80
79
  end
81
80
 
82
81
  return result
@@ -43,7 +43,8 @@ module Hermann
43
43
  # @return +Concurrent::Promise+ Represents a promise to send the
44
44
  # data to the kafka broker. Upon execution the Promise's status
45
45
  # will be set
46
- def push_single(msg, topic, key)
46
+ def push_single(msg, topic, key, _)
47
+ key = key && key.to_java
47
48
  Concurrent::Promise.execute {
48
49
  data = ProducerUtil::KeyedMessage.new(topic, nil, key, msg.to_java_bytes)
49
50
  begin
@@ -1,3 +1,3 @@
1
1
  module Hermann
2
- VERSION = '0.26.0'
2
+ VERSION = '0.26.1'
3
3
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: hermann
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.26.0.0
4
+ version: 0.26.1.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - R. Tyler Croy
@@ -10,7 +10,7 @@ authors:
10
10
  autorequire:
11
11
  bindir: bin
12
12
  cert_chain: []
13
- date: 2015-09-08 00:00:00.000000000 Z
13
+ date: 2015-09-22 00:00:00.000000000 Z
14
14
  dependencies:
15
15
  - !ruby/object:Gem::Dependency
16
16
  name: json
@@ -68,6 +68,7 @@ files:
68
68
  - ext/hermann/extconf.rb
69
69
  - ext/hermann/hermann_rdkafka.c
70
70
  - ext/hermann/hermann_rdkafka.h
71
+ - ext/hermann/rdcrc32.h
71
72
  - lib/hermann.rb
72
73
  - lib/hermann/consumer.rb
73
74
  - lib/hermann/discovery/metadata.rb
@@ -102,9 +103,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
102
103
  version: '0'
103
104
  requirements: []
104
105
  rubyforge_project:
105
- rubygems_version: 2.4.3
106
+ rubygems_version: 2.4.8
106
107
  signing_key:
107
108
  specification_version: 3
108
109
  summary: A Kafka consumer/producer gem supporting both MRI and JRuby
109
110
  test_files: []
110
- has_rdoc: