rdkafka 0.16.0.beta1 → 0.16.0

This diff shows the changes between publicly released package versions as they appear in their respective public registries. It is provided for informational purposes only.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 3135d4f2663517517330d165948e9761ffc0ecd20942f911a6ee9541c437ee7e
- data.tar.gz: 9a489c2400c4054e9cec0d6c8d24f75bce407d370f8c393b7b31dcd0dbf7c361
+ metadata.gz: 07f1bd822870a6f28cd441b95870683442b3d5857f12d85e04057fa29a29121a
+ data.tar.gz: f734cec92e64dfdfbb6a7e3e1875ad402982aeea6d9e8e14befeb50144f2c125
  SHA512:
- metadata.gz: be8eba2aec012af189d893ebf6ddcd4e4dec117aecd7f36afa76bc6ee4c2a3fa93f4ac4160efa16e8c91efd7011ee41cda00a5e2146e18044a166035850cd490
- data.tar.gz: d1761a6ab9c7d9ee539d79679dc730cc392fc2d7369d3f78b9c8d8f1c800855ab15ae7c472259b43e95139aa30d4777026c0529671fd85f98f3e5fceeaf53e62
+ metadata.gz: 644f1435d988fd962cae3031d5d7c7a31de2d618e80def8f48913bda6309e08b81ff62741933b9ada5c40f59f79502e0a9dfee09efcc57fe7845269f178d1738
+ data.tar.gz: 3f83530ade65c38064b1c3a9922c173f973f57b49544fce3fd645a1c6a47ee18506450b33299e6417510f052123114fced56689a6ce2c59abd7d4be11b752c48
checksums.yaml.gz.sig CHANGED
Binary file
@@ -22,11 +22,11 @@ jobs:
      fail-fast: false
      matrix:
        ruby:
+         - '3.4.0-preview1'
          - '3.3'
          - '3.2'
          - '3.1'
          - '3.0'
-         - '2.7'
        include:
          - ruby: '3.3'
            coverage: 'true'
data/.ruby-version CHANGED
@@ -1 +1 @@
- 3.3.1
+ 3.3.3
data/CHANGELOG.md CHANGED
@@ -1,10 +1,18 @@
  # Rdkafka Changelog

- ## 0.16.0 (Unreleased)
+ ## 0.16.0 (2024-06-13)
+ - **[Breaking]** Retire support for Ruby 2.7.
+ - **[Breaking]** Messages without headers returned by `#poll` contain a frozen empty hash.
+ - **[Breaking]** `HashWithSymbolKeysTreatedLikeStrings` has been removed, so headers are regular hashes with string keys.
+ - **[Feature]** Support incremental config describe + alter API.
  - **[Feature]** Oauthbearer token refresh callback (bruce-szalwinski-he)
+ - **[Feature]** Provide the ability to use topic config on a producer for custom behaviors per dispatch.
+ - [Enhancement] Use a topic config reference cache for message production to prevent topic object allocation with each message.
+ - [Enhancement] Provide `Rdkafka::Admin#describe_errors` to get error descriptions (mensfeld)
  - [Enhancement] Replace the time-poll based wait engine with an event-based one to improve response times on blocking operations and wait (nijikon + mensfeld)
  - [Enhancement] Allow for usage of the second regex engine of librdkafka by setting `RDKAFKA_DISABLE_REGEX_EXT` during build (mensfeld)
  - [Enhancement] Name the polling Thread as `rdkafka.native_kafka#<name>` (nijikon)
+ - [Enhancement] Save two objects per message produced and lower CPU usage on message production with small improvements.
  - [Change] Allow for native kafka thread operations deferring and manual start for consumer, producer and admin.
  - [Change] The `wait_timeout` argument in `AbstractHandle.wait` method is deprecated and will be removed in future versions without replacement. We don't rely on its value anymore (nijikon)
  - [Fix] Background logger stops working after forking causing memory leaks (mensfeld)
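The two `[Breaking]` header changes above alter how consumed message headers behave. A minimal sketch of the 0.16.0 behavior, assuming an `Rdkafka::Consumer` that is already subscribed to a topic (setup omitted); the `"correlation-id"` key is purely illustrative:

```ruby
# Assumes `consumer` is an already-subscribed Rdkafka::Consumer.
message = consumer.poll(250)

# 0.16.0: a message produced without headers exposes a frozen empty hash
message.headers          # => {}
message.headers.frozen?  # => true when no headers were present

# Headers are now regular hashes with string keys
message.headers["correlation-id"]
```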
data/README.md CHANGED
@@ -163,6 +163,7 @@ bundle exec rake produce_messages

  | rdkafka-ruby | librdkafka |
  |-|-|
+ | 0.16.0 (2024-06-13) | 2.3.0 (2023-10-25) |
  | 0.15.0 (2023-12-03) | 2.3.0 (2023-10-25) |
  | 0.14.0 (2023-11-21) | 2.2.0 (2023-07-12) |
  | 0.13.0 (2023-07-24) | 2.0.2 (2023-01-20) |
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     # A single config binding result that represents its values extracted from C
+     class ConfigBindingResult
+       attr_reader :name, :value, :read_only, :default, :sensitive, :synonym, :synonyms
+
+       # @param config_ptr [FFI::Pointer] config pointer
+       def initialize(config_ptr)
+         @name = Bindings.rd_kafka_ConfigEntry_name(config_ptr)
+         @value = Bindings.rd_kafka_ConfigEntry_value(config_ptr)
+         @read_only = Bindings.rd_kafka_ConfigEntry_is_read_only(config_ptr)
+         @default = Bindings.rd_kafka_ConfigEntry_is_default(config_ptr)
+         @sensitive = Bindings.rd_kafka_ConfigEntry_is_sensitive(config_ptr)
+         @synonym = Bindings.rd_kafka_ConfigEntry_is_synonym(config_ptr)
+         @synonyms = []
+
+         # The code below builds up the config synonyms using the same config binding
+         pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+         synonym_ptr = Bindings.rd_kafka_ConfigEntry_synonyms(config_ptr, pointer_to_size_t)
+         synonyms_ptr = synonym_ptr.read_array_of_pointer(pointer_to_size_t.read_int)
+
+         (1..pointer_to_size_t.read_int).map do |ar|
+           self.class.new synonyms_ptr[ar - 1]
+         end
+       end
+     end
+   end
+ end
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     # A simple binding that represents the requested config resource
+     class ConfigResourceBindingResult
+       attr_reader :name, :type, :configs, :configs_count
+
+       def initialize(config_resource_ptr)
+         ffi_binding = Bindings::ConfigResource.new(config_resource_ptr)
+
+         @name = ffi_binding[:name]
+         @type = ffi_binding[:type]
+         @configs = []
+       end
+     end
+   end
+ end
@@ -10,6 +10,7 @@ module Rdkafka

    def initialize(acls:, acls_count:)
      @acls=[]
+
      if acls != FFI::Pointer::NULL
        acl_binding_result_pointers = acls.read_array_of_pointer(acls_count)
        (1..acls_count).map do |acl_index|
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DescribeConfigsHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :response_string, :pointer,
+              :config_entries, :pointer,
+              :entry_count, :int
+
+       # @return [String] the name of the operation.
+       def operation_name
+         "describe configs"
+       end
+
+       # @return [DescribeConfigsReport] instance with an array of resources and their configs.
+       def create_result
+         DescribeConfigsReport.new(
+           config_entries: self[:config_entries],
+           entry_count: self[:entry_count]
+         )
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: self[:response_string].read_string
+         )
+       end
+     end
+   end
+ end
@@ -0,0 +1,54 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class DescribeConfigsReport
+       attr_reader :resources
+
+       def initialize(config_entries:, entry_count:)
+         @resources=[]
+
+         return if config_entries == FFI::Pointer::NULL
+
+         config_entries
+           .read_array_of_pointer(entry_count)
+           .each { |config_resource_result_ptr| validate!(config_resource_result_ptr) }
+           .each do |config_resource_result_ptr|
+             config_resource_result = ConfigResourceBindingResult.new(config_resource_result_ptr)
+
+             pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+             configs_ptr = Bindings.rd_kafka_ConfigResource_configs(
+               config_resource_result_ptr,
+               pointer_to_size_t
+             )
+
+             configs_ptr
+               .read_array_of_pointer(pointer_to_size_t.read_int)
+               .map { |config_ptr| ConfigBindingResult.new(config_ptr) }
+               .each { |config_binding| config_resource_result.configs << config_binding }
+
+             @resources << config_resource_result
+           end
+       ensure
+         return if config_entries == FFI::Pointer::NULL
+
+         Bindings.rd_kafka_ConfigResource_destroy_array(config_entries, entry_count)
+       end
+
+       private
+
+       def validate!(config_resource_result_ptr)
+         code = Bindings.rd_kafka_ConfigResource_error(config_resource_result_ptr)
+
+         return if code.zero?
+
+         raise(
+           RdkafkaError.new(
+             code,
+             Bindings.rd_kafka_ConfigResource_error_string(config_resource_result_ptr)
+           )
+         )
+       end
+     end
+   end
+ end
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class IncrementalAlterConfigsHandle < AbstractHandle
+       layout :pending, :bool,
+              :response, :int,
+              :response_string, :pointer,
+              :config_entries, :pointer,
+              :entry_count, :int
+
+       # @return [String] the name of the operation.
+       def operation_name
+         "incremental alter configs"
+       end
+
+       # @return [IncrementalAlterConfigsReport] instance with an array of resources and their configs.
+       def create_result
+         IncrementalAlterConfigsReport.new(
+           config_entries: self[:config_entries],
+           entry_count: self[:entry_count]
+         )
+       end
+
+       def raise_error
+         raise RdkafkaError.new(
+           self[:response],
+           broker_message: self[:response_string].read_string
+         )
+       end
+     end
+   end
+ end
@@ -0,0 +1,54 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Admin
+     class IncrementalAlterConfigsReport
+       attr_reader :resources
+
+       def initialize(config_entries:, entry_count:)
+         @resources=[]
+
+         return if config_entries == FFI::Pointer::NULL
+
+         config_entries
+           .read_array_of_pointer(entry_count)
+           .each { |config_resource_result_ptr| validate!(config_resource_result_ptr) }
+           .each do |config_resource_result_ptr|
+             config_resource_result = ConfigResourceBindingResult.new(config_resource_result_ptr)
+
+             pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+             configs_ptr = Bindings.rd_kafka_ConfigResource_configs(
+               config_resource_result_ptr,
+               pointer_to_size_t
+             )
+
+             configs_ptr
+               .read_array_of_pointer(pointer_to_size_t.read_int)
+               .map { |config_ptr| ConfigBindingResult.new(config_ptr) }
+               .each { |config_binding| config_resource_result.configs << config_binding }
+
+             @resources << config_resource_result
+           end
+       ensure
+         return if config_entries == FFI::Pointer::NULL
+
+         Bindings.rd_kafka_ConfigResource_destroy_array(config_entries, entry_count)
+       end
+
+       private
+
+       def validate!(config_resource_result_ptr)
+         code = Bindings.rd_kafka_ConfigResource_error(config_resource_result_ptr)
+
+         return if code.zero?
+
+         raise(
+           RdkafkaError.new(
+             code,
+             Bindings.rd_kafka_ConfigResource_error_string(config_resource_result_ptr)
+           )
+         )
+       end
+     end
+   end
+ end
data/lib/rdkafka/admin.rb CHANGED
@@ -4,6 +4,50 @@ module Rdkafka
    class Admin
      include Helpers::OAuth

+     class << self
+       # Allows us to retrieve librdkafka errors with descriptions
+       # Useful for debugging and building UIs, etc.
+       #
+       # @return [Hash<Integer, Hash>] hash with errors mapped by code
+       def describe_errors
+         # Memory pointers for the array of structures and count
+         p_error_descs = FFI::MemoryPointer.new(:pointer)
+         p_count = FFI::MemoryPointer.new(:size_t)
+
+         # Call the attached function
+         Bindings.rd_kafka_get_err_descs(p_error_descs, p_count)
+
+         # Retrieve the number of items in the array
+         count = p_count.read_uint
+
+         # Get the pointer to the array of error descriptions
+         array_of_errors = FFI::Pointer.new(Bindings::NativeErrorDesc, p_error_descs.read_pointer)
+
+         errors = {}
+
+         count.times do |i|
+           # Get the pointer to each struct
+           error_ptr = array_of_errors[i]
+
+           # Create a new instance of NativeErrorDesc for each item
+           error_desc = Bindings::NativeErrorDesc.new(error_ptr)
+
+           # Read values from the struct
+           code = error_desc[:code]
+
+           name = ''
+           desc = ''
+
+           name = error_desc[:name].read_string unless error_desc[:name].null?
+           desc = error_desc[:desc].read_string unless error_desc[:desc].null?
+
+           errors[code] = { code: code, name: name, description: desc }
+         end
+
+         errors
+       end
+     end
+
      # @private
      def initialize(native_kafka)
        @native_kafka = native_kafka
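A hedged usage sketch for the new class-level `Rdkafka::Admin.describe_errors` shown above; the concrete code/name pair in the comment is illustrative, since the actual values come from librdkafka:

```ruby
# Returns a hash keyed by error code, with { code:, name:, description: } values.
errors = Rdkafka::Admin.describe_errors

errors[3]  # => e.g. { code: 3, name: "UNKNOWN_TOPIC_OR_PART", description: "..." }

errors.each_value do |error|
  puts "#{error[:code]} #{error[:name]}: #{error[:description]}"
end
```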
@@ -620,6 +664,166 @@ module Rdkafka
        describe_acl_handle
      end

+
+     # Describe configs
+     #
+     # @param resources [Array<Hash>] Array where elements are hashes with two keys:
+     #   - `:resource_type` - numerical resource type based on Kafka API
+     #   - `:resource_name` - string with resource name
+     # @return [DescribeConfigsHandle] Describe config handle that can be used to wait for the
+     #   result of fetching resources with their appropriate configs
+     #
+     # @raise [RdkafkaError]
+     #
+     # @note Several resources can be requested at one go, but only one broker at a time
+     def describe_configs(resources)
+       closed_admin_check(__method__)
+
+       handle = DescribeConfigsHandle.new
+       handle[:pending] = true
+       handle[:response] = -1
+
+       queue_ptr = @native_kafka.with_inner do |inner|
+         Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
+       end
+
+       if queue_ptr.null?
+         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+       end
+
+       admin_options_ptr = @native_kafka.with_inner do |inner|
+         Rdkafka::Bindings.rd_kafka_AdminOptions_new(
+           inner,
+           Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_DESCRIBECONFIGS
+         )
+       end
+
+       DescribeConfigsHandle.register(handle)
+       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, handle.to_ptr)
+
+       pointer_array = resources.map do |resource_details|
+         Rdkafka::Bindings.rd_kafka_ConfigResource_new(
+           resource_details.fetch(:resource_type),
+           FFI::MemoryPointer.from_string(
+             resource_details.fetch(:resource_name)
+           )
+         )
+       end
+
+       configs_array_ptr = FFI::MemoryPointer.new(:pointer, pointer_array.size)
+       configs_array_ptr.write_array_of_pointer(pointer_array)
+
+       begin
+         @native_kafka.with_inner do |inner|
+           Rdkafka::Bindings.rd_kafka_DescribeConfigs(
+             inner,
+             configs_array_ptr,
+             pointer_array.size,
+             admin_options_ptr,
+             queue_ptr
+           )
+         end
+       rescue Exception
+         DescribeConfigsHandle.remove(handle.to_ptr.address)
+
+         raise
+       ensure
+         Rdkafka::Bindings.rd_kafka_ConfigResource_destroy_array(
+           configs_array_ptr,
+           pointer_array.size
+         ) if configs_array_ptr
+       end
+
+       handle
+     end
+
+     # Incrementally alters all the configs provided for the given resources
+     #
+     # @param resources_with_configs [Array<Hash>] resources with the configs key that contains
+     #   name, value and the proper op_type to perform on this value.
+     #
+     # @return [IncrementalAlterConfigsHandle] Incremental alter configs handle that can be used to
+     #   wait for the result of altering resources with their appropriate configs
+     #
+     # @raise [RdkafkaError]
+     #
+     # @note Several resources can be requested at one go, but only one broker at a time
+     # @note The results won't contain altered values but only the altered resources
+     def incremental_alter_configs(resources_with_configs)
+       closed_admin_check(__method__)
+
+       handle = IncrementalAlterConfigsHandle.new
+       handle[:pending] = true
+       handle[:response] = -1
+
+       queue_ptr = @native_kafka.with_inner do |inner|
+         Rdkafka::Bindings.rd_kafka_queue_get_background(inner)
+       end
+
+       if queue_ptr.null?
+         raise Rdkafka::Config::ConfigError.new("rd_kafka_queue_get_background was NULL")
+       end
+
+       admin_options_ptr = @native_kafka.with_inner do |inner|
+         Rdkafka::Bindings.rd_kafka_AdminOptions_new(
+           inner,
+           Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_INCREMENTALALTERCONFIGS
+         )
+       end
+
+       IncrementalAlterConfigsHandle.register(handle)
+       Rdkafka::Bindings.rd_kafka_AdminOptions_set_opaque(admin_options_ptr, handle.to_ptr)
+
+       # Build the resources properly here
+       pointer_array = resources_with_configs.map do |resource_details|
+         # First build the appropriate resource representation
+         resource_ptr = Rdkafka::Bindings.rd_kafka_ConfigResource_new(
+           resource_details.fetch(:resource_type),
+           FFI::MemoryPointer.from_string(
+             resource_details.fetch(:resource_name)
+           )
+         )
+
+         resource_details.fetch(:configs).each do |config|
+           Bindings.rd_kafka_ConfigResource_add_incremental_config(
+             resource_ptr,
+             config.fetch(:name),
+             config.fetch(:op_type),
+             config.fetch(:value)
+           )
+         end
+
+         resource_ptr
+       end
+
+       configs_array_ptr = FFI::MemoryPointer.new(:pointer, pointer_array.size)
+       configs_array_ptr.write_array_of_pointer(pointer_array)
+
+       begin
+         @native_kafka.with_inner do |inner|
+           Rdkafka::Bindings.rd_kafka_IncrementalAlterConfigs(
+             inner,
+             configs_array_ptr,
+             pointer_array.size,
+             admin_options_ptr,
+             queue_ptr
+           )
+         end
+       rescue Exception
+         IncrementalAlterConfigsHandle.remove(handle.to_ptr.address)
+
+         raise
+       ensure
+         Rdkafka::Bindings.rd_kafka_ConfigResource_destroy_array(
+           configs_array_ptr,
+           pointer_array.size
+         ) if configs_array_ptr
+       end
+
+       handle
+     end
+
      private

      def closed_admin_check(method)
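Both new admin methods return handles (see `DescribeConfigsHandle` and `IncrementalAlterConfigsHandle` above) that can be waited on like other rdkafka-ruby handles. A sketch under the following assumptions: `admin` is an `Rdkafka::Admin` instance, `resource_type` 2 maps to topics as in librdkafka's resource enum, and the topic name and config values are illustrative:

```ruby
resources = [{ resource_type: 2, resource_name: "example_topic" }]

# Describe: wait for the report, then walk resources and their config entries
report = admin.describe_configs(resources).wait(max_wait_timeout: 15)
report.resources.each do |resource|
  resource.configs.each { |config| puts "#{config.name} = #{config.value}" }
end

# Incremental alter: op_type 0 corresponds to RD_KAFKA_ALTER_CONFIG_OP_TYPE_SET
# (see the constants added in the bindings below)
changes = [{
  resource_type: 2,
  resource_name: "example_topic",
  configs: [{ name: "delete.retention.ms", value: "50000", op_type: 0 }]
}]

admin.incremental_alter_configs(changes).wait(max_wait_timeout: 15)
```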
@@ -89,10 +89,58 @@ module Rdkafka
    attach_function :rd_kafka_topic_partition_list_destroy, [:pointer], :void
    attach_function :rd_kafka_topic_partition_list_copy, [:pointer], :pointer

+   # Configs management
+   #
+   # Structs for management of configurations
+   # Each configuration is attached to a resource and one resource can have many configuration
+   # details. Each resource will also have separate error results if obtaining the configuration
+   # was not possible for any reason
+   class ConfigResource < FFI::Struct
+     layout :type, :int,
+            :name, :string
+   end
+
+   attach_function :rd_kafka_DescribeConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
+   attach_function :rd_kafka_ConfigResource_new, [:int32, :pointer], :pointer
+   attach_function :rd_kafka_ConfigResource_destroy_array, [:pointer, :int32], :void
+   attach_function :rd_kafka_event_DescribeConfigs_result, [:pointer], :pointer
+   attach_function :rd_kafka_DescribeConfigs_result_resources, [:pointer, :pointer], :pointer
+   attach_function :rd_kafka_ConfigResource_configs, [:pointer, :pointer], :pointer
+   attach_function :rd_kafka_ConfigEntry_name, [:pointer], :string
+   attach_function :rd_kafka_ConfigEntry_value, [:pointer], :string
+   attach_function :rd_kafka_ConfigEntry_is_read_only, [:pointer], :int
+   attach_function :rd_kafka_ConfigEntry_is_default, [:pointer], :int
+   attach_function :rd_kafka_ConfigEntry_is_sensitive, [:pointer], :int
+   attach_function :rd_kafka_ConfigEntry_is_synonym, [:pointer], :int
+   attach_function :rd_kafka_ConfigEntry_synonyms, [:pointer, :pointer], :pointer
+   attach_function :rd_kafka_ConfigResource_error, [:pointer], :int
+   attach_function :rd_kafka_ConfigResource_error_string, [:pointer], :string
+   attach_function :rd_kafka_IncrementalAlterConfigs, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
+   attach_function :rd_kafka_IncrementalAlterConfigs_result_resources, [:pointer, :pointer], :pointer
+   attach_function :rd_kafka_ConfigResource_add_incremental_config, [:pointer, :string, :int32, :string], :pointer
+   attach_function :rd_kafka_event_IncrementalAlterConfigs_result, [:pointer], :pointer
+
+   RD_KAFKA_ADMIN_OP_DESCRIBECONFIGS = 5
+   RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT = 104
+
+   RD_KAFKA_ADMIN_OP_INCREMENTALALTERCONFIGS = 16
+   RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT = 131072
+
+   RD_KAFKA_ALTER_CONFIG_OP_TYPE_SET = 0
+   RD_KAFKA_ALTER_CONFIG_OP_TYPE_DELETE = 1
+   RD_KAFKA_ALTER_CONFIG_OP_TYPE_APPEND = 2
+   RD_KAFKA_ALTER_CONFIG_OP_TYPE_SUBTRACT = 3
+
    # Errors
+   class NativeErrorDesc < FFI::Struct
+     layout :code, :int,
+            :name, :pointer,
+            :desc, :pointer
+   end

    attach_function :rd_kafka_err2name, [:int], :string
    attach_function :rd_kafka_err2str, [:int], :string
+   attach_function :rd_kafka_get_err_descs, [:pointer, :pointer], :void

    # Configuration

@@ -119,6 +167,9 @@ module Rdkafka
    # Log queue
    attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
    attach_function :rd_kafka_queue_get_main, [:pointer], :pointer
+   # Per topic configs
+   attach_function :rd_kafka_topic_conf_new, [], :pointer
+   attach_function :rd_kafka_topic_conf_set, [:pointer, :string, :string, :pointer, :int], :kafka_config_response

    LogCallback = FFI::Function.new(
      :void, [:pointer, :int, :string, :string]
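The two per-topic config bindings added above back the changelog's per-dispatch topic config feature. A low-level sketch of calling them directly through the attached FFI functions (normally the producer manages this and caches topic config references); the `"acks"` property and buffer size are illustrative:

```ruby
# Build a topic conf handle and set a single property on it.
topic_conf = Rdkafka::Bindings.rd_kafka_topic_conf_new

error_buffer = FFI::MemoryPointer.new(:char, 256)
result = Rdkafka::Bindings.rd_kafka_topic_conf_set(
  topic_conf,
  "acks",
  "all",
  error_buffer,
  256
)

# :kafka_config_response resolves to :config_ok on success
raise error_buffer.read_string unless result == :config_ok
```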