karafka-rdkafka 0.12.2 → 0.12.3

This diff compares the contents of two package versions that were publicly released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 1eb78fe36d0d36d5ba7b6313105d85fb0e392ecbf9a91214428bf0e77a356e14
-  data.tar.gz: fcddc2950bdf3cef4f0f495497808a3cb865b7940b80d515ebd4e7ee7a1b52d2
+  metadata.gz: 26881045d33d54883dbc4483364b0162e0f380619fcfe4079c892817b04328bf
+  data.tar.gz: 9db9e87135682ff34cbc699dd20d6dcf43f5d2d82ee82153152b7d91eaf02152
 SHA512:
-  metadata.gz: 44145d9a2d7d9b9231db9d0acfd40cf7adc54fe41eae67821196a067e85ed709d7e233df977f848a1ff4639bff9c383524dae40d17e9e37d2307bfdff4b8bb75
-  data.tar.gz: 84df98bd3eb668168d61cdc09b0d64e310c6927e9fa70a451594c54324e688aadffd740023ee16ed50eb883f278b4285082d1ff7831350b695c3e48d2394662d
+  metadata.gz: 64200c66fb2623c04bb4dd3313173926a508e6a2a0678d5a5beab83dab75b4f597d6fd1d5b7fd2b875aec74db5338fa57bb54b79b1d420d9b04d1a5272c86171
+  data.tar.gz: ce91a9bd42090b85a13a2ba1d40779894b330aab5f50a074e093386c9137ca926ebf90d402a7a9179e2192251a9c55291f72f16ead9a8680f13ce20f659e68c4
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -1,3 +1,7 @@
+# 0.12.3
+- Include backtrace in non-raised binded errors.
+- Include topic name in the delivery reports
+
 # 0.12.2
 * Increase the metadata default timeout from 250ms to 2 seconds. This should allow for working with remote clusters.
 
@@ -153,6 +153,7 @@ module Rdkafka
     ) do |_client_prr, err_code, reason, _opaque|
       if Rdkafka::Config.error_callback
         error = Rdkafka::RdkafkaError.new(err_code, broker_message: reason)
+        error.set_backtrace(caller)
         Rdkafka::Config.error_callback.call(error)
       end
     end
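
The hunk above only adds error.set_backtrace(caller) before the error object reaches the user-supplied callback. As a hedged illustration (not part of this diff; the logging style is an assumption), a callback registered through Rdkafka::Config.error_callback= can now report where an asynchronous, never-raised error surfaced:

  require "rdkafka"

  # Illustrative callback only; errors arriving here are built by the bindings
  # and, as of 0.12.3, carry a backtrace set via set_backtrace(caller).
  Rdkafka::Config.error_callback = lambda do |error|
    warn "rdkafka error: #{error.code} (#{error})"
    warn error.backtrace.join("\n") if error.backtrace
  end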
@@ -109,14 +109,18 @@ module Rdkafka
       message = Rdkafka::Bindings::Message.new(message_ptr)
       delivery_handle_ptr_address = message[:_private].address
       if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
+        topic_name = Rdkafka::Bindings.rd_kafka_topic_name(message[:rkt])
+
         # Update delivery handle
         delivery_handle[:response] = message[:err]
         delivery_handle[:partition] = message[:partition]
         delivery_handle[:offset] = message[:offset]
+        delivery_handle[:topic_name] = FFI::MemoryPointer.from_string(topic_name)
         delivery_handle[:pending] = false
+
         # Call delivery callback on opaque
         if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
-          opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], message[:err]), delivery_handle)
+          opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], topic_name, message[:err]), delivery_handle)
         end
       end
     end
@@ -6,7 +6,8 @@ module Rdkafka
       layout :pending, :bool,
              :response, :int,
              :partition, :int,
-             :offset, :int64
+             :offset, :int64,
+             :topic_name, :pointer
 
       # @return [String] the name of the operation (e.g. "delivery")
       def operation_name
@@ -15,7 +16,7 @@ module Rdkafka
 
       # @return [DeliveryReport] a report on the delivery of the message
       def create_result
-        DeliveryReport.new(self[:partition], self[:offset])
+        DeliveryReport.new(self[:partition], self[:offset], self[:topic_name].read_string)
       end
     end
   end
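
The :topic_name field is declared as :pointer because the FFI struct layout holds only primitive and pointer types, so the delivery callback copies the topic name into native memory and create_result reads it back with read_string. A minimal sketch of that round trip (illustrative only, not code from the gem):

  require "ffi"

  # Copy a Ruby string into NUL-terminated native memory, then read it back,
  # mirroring how the delivery handle stores and later recovers the topic name.
  ptr = FFI::MemoryPointer.from_string("produce_test_topic")
  ptr.read_string # => "produce_test_topic"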
@@ -10,15 +10,20 @@ module Rdkafka
       # @return [Integer]
       attr_reader :offset
 
+      # The name of the topic this message was produced to.
+      # @return [String]
+      attr_reader :topic_name
+
       # Error in case happen during produce.
       # @return [String]
       attr_reader :error
 
       private
 
-      def initialize(partition, offset, error = nil)
+      def initialize(partition, offset, topic_name = nil, error = nil)
         @partition = partition
         @offset = offset
+        @topic_name = topic_name
         @error = error
       end
     end
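
Together these changes make the topic name available on delivery reports. A minimal usage sketch (not part of this diff; the broker address and topic name are placeholders):

  require "rdkafka"

  producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

  handle = producer.produce(topic: "produce_test_topic", payload: "hello")
  report = handle.wait(max_wait_timeout: 5)

  puts report.topic_name # => "produce_test_topic" (new in 0.12.3)
  puts report.partition
  puts report.offset

  producer.close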
@@ -1,5 +1,5 @@
 module Rdkafka
-  VERSION = "0.12.2"
+  VERSION = "0.12.3"
   LIBRDKAFKA_VERSION = "2.0.2"
   LIBRDKAFKA_SOURCE_SHA256 = "f321bcb1e015a34114c83cf1aa7b99ee260236aab096b85c003170c90a47ca9d"
 end
@@ -9,6 +9,7 @@ describe Rdkafka::Producer::DeliveryHandle do
       handle[:response] = response
       handle[:partition] = 2
       handle[:offset] = 100
+      handle[:topic_name] = FFI::MemoryPointer.from_string("produce_test_topic")
     end
   end
 
@@ -29,6 +30,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
       expect(report.partition).to eq(2)
       expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
     end
 
     it "should wait without a timeout" do
@@ -36,6 +38,7 @@ describe Rdkafka::Producer::DeliveryHandle do
 
       expect(report.partition).to eq(2)
      expect(report.offset).to eq(100)
+      expect(report.topic_name).to eq("produce_test_topic")
     end
   end
 end
@@ -1,7 +1,7 @@
 require "spec_helper"
 
 describe Rdkafka::Producer::DeliveryReport do
-  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "error") }
+  subject { Rdkafka::Producer::DeliveryReport.new(2, 100, "topic", "error") }
 
   it "should get the partition" do
     expect(subject.partition).to eq 2
@@ -11,6 +11,10 @@ describe Rdkafka::Producer::DeliveryReport do
     expect(subject.offset).to eq 100
   end
 
+  it "should get the topic_name" do
+    expect(subject.topic_name).to eq "topic"
+  end
+
   it "should get the error" do
     expect(subject.error).to eq "error"
   end
@@ -30,6 +30,7 @@ describe Rdkafka::Producer do
       expect(report).not_to be_nil
       expect(report.partition).to eq 1
       expect(report.offset).to be >= 0
+      expect(report.topic_name).to eq "produce_test_topic"
       @callback_called = true
     end
 
@@ -113,6 +114,7 @@ describe Rdkafka::Producer do
       expect(called_report.first).not_to be_nil
       expect(called_report.first.partition).to eq 1
       expect(called_report.first.offset).to be >= 0
+      expect(called_report.first.topic_name).to eq "produce_test_topic"
     end
 
     it "should provide handle" do
@@ -448,7 +450,8 @@ describe Rdkafka::Producer do
 
       report_json = JSON.generate(
         "partition" => report.partition,
-        "offset" => report.offset
+        "offset" => report.offset,
+        "topic_name" => report.topic_name
       )
 
       writer.write(report_json)
@@ -460,7 +463,8 @@ describe Rdkafka::Producer do
       report_hash = JSON.parse(reader.read)
       report = Rdkafka::Producer::DeliveryReport.new(
         report_hash["partition"],
-        report_hash["offset"]
+        report_hash["offset"],
+        report_hash["topic_name"]
       )
 
       reader.close
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-rdkafka
 version: !ruby/object:Gem::Version
-  version: 0.12.2
+  version: 0.12.3
 platform: ruby
 authors:
 - Thijs Cadier
@@ -35,7 +35,7 @@ cert_chain:
   Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
   MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
   -----END CERTIFICATE-----
-date: 2023-05-24 00:00:00.000000000 Z
+date: 2023-05-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: ffi
metadata.gz.sig CHANGED
Binary file