rdkafka 0.22.2 → 0.25.0

This diff reflects the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (99)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +38 -3
  3. data/Gemfile +9 -0
  4. data/README.md +17 -14
  5. data/bin/verify_kafka_warnings +37 -0
  6. data/dist/{librdkafka-2.8.0.tar.gz → librdkafka-2.12.1.tar.gz} +0 -0
  7. data/docker-compose-ssl.yml +35 -0
  8. data/docker-compose.yml +1 -1
  9. data/lib/rdkafka/abstract_handle.rb +23 -5
  10. data/lib/rdkafka/admin/acl_binding_result.rb +1 -1
  11. data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
  12. data/lib/rdkafka/admin/create_acl_handle.rb +3 -0
  13. data/lib/rdkafka/admin/create_acl_report.rb +3 -0
  14. data/lib/rdkafka/admin/create_partitions_handle.rb +3 -0
  15. data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
  16. data/lib/rdkafka/admin/create_topic_handle.rb +3 -0
  17. data/lib/rdkafka/admin/create_topic_report.rb +3 -0
  18. data/lib/rdkafka/admin/delete_acl_handle.rb +3 -0
  19. data/lib/rdkafka/admin/delete_acl_report.rb +3 -0
  20. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -0
  21. data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
  22. data/lib/rdkafka/admin/delete_topic_handle.rb +3 -0
  23. data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
  24. data/lib/rdkafka/admin/describe_acl_handle.rb +3 -0
  25. data/lib/rdkafka/admin/describe_acl_report.rb +3 -0
  26. data/lib/rdkafka/admin/describe_configs_handle.rb +3 -0
  27. data/lib/rdkafka/admin/describe_configs_report.rb +6 -0
  28. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +3 -0
  29. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +6 -0
  30. data/lib/rdkafka/admin.rb +108 -113
  31. data/lib/rdkafka/bindings.rb +76 -30
  32. data/lib/rdkafka/callbacks.rb +71 -11
  33. data/lib/rdkafka/config.rb +21 -12
  34. data/lib/rdkafka/consumer/headers.rb +3 -2
  35. data/lib/rdkafka/consumer/message.rb +7 -3
  36. data/lib/rdkafka/consumer/partition.rb +6 -2
  37. data/lib/rdkafka/consumer/topic_partition_list.rb +11 -7
  38. data/lib/rdkafka/consumer.rb +41 -29
  39. data/lib/rdkafka/defaults.rb +106 -0
  40. data/lib/rdkafka/error.rb +16 -1
  41. data/lib/rdkafka/helpers/oauth.rb +45 -12
  42. data/lib/rdkafka/metadata.rb +29 -5
  43. data/lib/rdkafka/native_kafka.rb +26 -2
  44. data/lib/rdkafka/producer/delivery_report.rb +8 -4
  45. data/lib/rdkafka/producer/partitions_count_cache.rb +24 -14
  46. data/lib/rdkafka/producer.rb +52 -26
  47. data/lib/rdkafka/version.rb +6 -3
  48. data/lib/rdkafka.rb +1 -0
  49. data/rdkafka.gemspec +35 -13
  50. data/renovate.json +6 -25
  51. metadata +23 -124
  52. data/.github/CODEOWNERS +0 -3
  53. data/.github/FUNDING.yml +0 -1
  54. data/.github/workflows/ci_linux_x86_64_gnu.yml +0 -271
  55. data/.github/workflows/ci_linux_x86_64_musl.yml +0 -194
  56. data/.github/workflows/ci_macos_arm64.yml +0 -284
  57. data/.github/workflows/push_linux_x86_64_gnu.yml +0 -65
  58. data/.github/workflows/push_linux_x86_64_musl.yml +0 -79
  59. data/.github/workflows/push_macos_arm64.yml +0 -54
  60. data/.github/workflows/push_ruby.yml +0 -37
  61. data/.github/workflows/verify-action-pins.yml +0 -16
  62. data/.gitignore +0 -14
  63. data/.rspec +0 -2
  64. data/.ruby-gemset +0 -1
  65. data/.ruby-version +0 -1
  66. data/.yardopts +0 -2
  67. data/ext/README.md +0 -19
  68. data/ext/build_common.sh +0 -361
  69. data/ext/build_linux_x86_64_gnu.sh +0 -306
  70. data/ext/build_linux_x86_64_musl.sh +0 -763
  71. data/ext/build_macos_arm64.sh +0 -550
  72. data/spec/rdkafka/abstract_handle_spec.rb +0 -117
  73. data/spec/rdkafka/admin/create_acl_handle_spec.rb +0 -56
  74. data/spec/rdkafka/admin/create_acl_report_spec.rb +0 -18
  75. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -52
  76. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -16
  77. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +0 -85
  78. data/spec/rdkafka/admin/delete_acl_report_spec.rb +0 -72
  79. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -52
  80. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -16
  81. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +0 -85
  82. data/spec/rdkafka/admin/describe_acl_report_spec.rb +0 -73
  83. data/spec/rdkafka/admin_spec.rb +0 -971
  84. data/spec/rdkafka/bindings_spec.rb +0 -199
  85. data/spec/rdkafka/callbacks_spec.rb +0 -20
  86. data/spec/rdkafka/config_spec.rb +0 -258
  87. data/spec/rdkafka/consumer/headers_spec.rb +0 -73
  88. data/spec/rdkafka/consumer/message_spec.rb +0 -139
  89. data/spec/rdkafka/consumer/partition_spec.rb +0 -57
  90. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -248
  91. data/spec/rdkafka/consumer_spec.rb +0 -1274
  92. data/spec/rdkafka/error_spec.rb +0 -89
  93. data/spec/rdkafka/metadata_spec.rb +0 -79
  94. data/spec/rdkafka/native_kafka_spec.rb +0 -130
  95. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -45
  96. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -25
  97. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +0 -359
  98. data/spec/rdkafka/producer_spec.rb +0 -1345
  99. data/spec/spec_helper.rb +0 -195
data/lib/rdkafka/producer/delivery_report.rb CHANGED
@@ -12,8 +12,8 @@ module Rdkafka
     # @return [Integer]
     attr_reader :offset
 
-    # The name of the topic this message was produced to or nil in case of reports with errors
-    # where topic was not reached.
+    # The name of the topic this message was produced to or nil in case delivery failed and we
+    # were not able to get the topic reference
     #
     # @return [String, nil]
     attr_reader :topic_name
@@ -32,8 +32,12 @@ module Rdkafka
     # We do not remove the original `#topic_name` because of backwards compatibility
     alias topic topic_name
 
-    private
-
+    # @private
+    # @param partition [Integer] partition number
+    # @param offset [Integer] message offset
+    # @param topic_name [String, nil] topic name
+    # @param error [Integer, nil] error code if any
+    # @param label [Object, nil] user-defined label
     def initialize(partition, offset, topic_name = nil, error = nil, label = nil)
       @partition = partition
       @offset = offset
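For context, the `topic` alias in the hunk above means both accessors keep working after this release. A minimal sketch; the initializer is marked `@private` and is constructed directly here only for illustration:

```ruby
# Normally a report arrives via a delivery callback or DeliveryHandle#wait.
report = Rdkafka::Producer::DeliveryReport.new(0, 100, "events")

report.topic_name #=> "events" (nil when delivery failed before the topic was resolved)
report.topic      #=> "events" (alias kept for backwards compatibility)
```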
data/lib/rdkafka/producer/partitions_count_cache.rb CHANGED
@@ -17,9 +17,9 @@ module Rdkafka
     #
     # 2. Edge case handling
     # If a user configures `statistics.interval.ms` much higher than the default cache TTL
-    # (30 seconds), the cache will still function correctly. When statistics updates don't
-    # occur frequently enough, the cache entries will expire naturally, triggering a
-    # blocking refresh when needed.
+    # ({Defaults::PARTITIONS_COUNT_CACHE_TTL_MS}ms), the cache will still function correctly.
+    # When statistics updates don't occur frequently enough, the cache entries will expire
+    # naturally, triggering a blocking refresh when needed.
     #
     # 3. User configuration awareness
     # The cache respects user-defined settings. If `topic.metadata.refresh.interval.ms` is
@@ -46,22 +46,32 @@ module Rdkafka
     class PartitionsCountCache
       include Helpers::Time
 
-      # Default time-to-live for cached partition counts in seconds
-      #
-      # @note This default was chosen to balance freshness of metadata with performance
-      #   optimization. Most Kafka cluster topology changes are planned operations, making 30
-      #   seconds a reasonable compromise.
-      DEFAULT_TTL = 30
-
       # Creates a new partition count cache
       #
-      # @param ttl [Integer] Time-to-live in seconds for cached values
-      def initialize(ttl = DEFAULT_TTL)
+      # @param ttl [Integer, nil] DEPRECATED: Use ttl_ms instead.
+      #   Time-to-live in seconds for cached values. Will be removed in v1.0.0.
+      # @param ttl_ms [Integer, nil] Time-to-live in milliseconds for cached values.
+      #   Defaults to {Defaults::PARTITIONS_COUNT_CACHE_TTL_MS}.
+      def initialize(ttl = :not_provided, ttl_ms: :not_provided)
         @counts = {}
         @mutex_hash = {}
         # Used only for @mutex_hash access to ensure thread-safety when creating new mutexes
         @mutex_for_hash = Mutex.new
-        @ttl = ttl
+
+        # Determine which TTL value to use
+        if ttl != :not_provided && ttl_ms != :not_provided
+          warn "DEPRECATION WARNING: Both ttl and ttl_ms were provided to PartitionsCountCache. " \
+            "Using ttl_ms. The ttl parameter is deprecated and will be removed in v1.0.0."
+          @ttl_ms = ttl_ms
+        elsif ttl != :not_provided
+          warn "DEPRECATION WARNING: ttl (seconds) parameter for PartitionsCountCache is deprecated. " \
+            "Use ttl_ms (milliseconds) instead. This parameter will be removed in v1.0.0."
+          @ttl_ms = (ttl * 1000).to_i
+        elsif ttl_ms == :not_provided
+          @ttl_ms = Defaults::PARTITIONS_COUNT_CACHE_TTL_MS
+        else
+          @ttl_ms = ttl_ms
+        end
       end
 
       # Reads partition count for a topic with automatic refresh when expired
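Based on the constructor change above, a sketch of the three configuration paths; the cache is normally created and managed internally by `Rdkafka::Producer`, so direct construction is shown only to illustrate the deprecation behavior:

```ruby
# Preferred: TTL expressed in milliseconds.
cache = Rdkafka::Producer::PartitionsCountCache.new(ttl_ms: 60_000)

# Deprecated: positional TTL in seconds; prints a deprecation warning and is
# converted to milliseconds internally via (ttl * 1000).to_i.
legacy = Rdkafka::Producer::PartitionsCountCache.new(60)

# No arguments: falls back to Defaults::PARTITIONS_COUNT_CACHE_TTL_MS.
default = Rdkafka::Producer::PartitionsCountCache.new
```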
@@ -209,7 +219,7 @@ module Rdkafka
       # @param timestamp [Float] Monotonic timestamp to check
       # @return [Boolean] true if expired, false otherwise
       def expired?(timestamp)
-        monotonic_now - timestamp > @ttl
+        (monotonic_now - timestamp) * 1_000 > @ttl_ms
       end
     end
   end
 end
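The expiry check now scales elapsed monotonic seconds to milliseconds before comparing against `@ttl_ms`. A worked example, assuming the previous 30-second default carried over as 30_000 ms:

```ruby
# Entry cached at monotonic t = 10.0 s, checked at t = 40.5 s.
elapsed_ms = (40.5 - 10.0) * 1_000 #=> 30_500.0
elapsed_ms > 30_000                #=> true, so the entry is considered expired
```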
data/lib/rdkafka/producer.rb CHANGED
@@ -6,9 +6,6 @@ module Rdkafka
     include Helpers::Time
     include Helpers::OAuth
 
-    # Empty hash used as a default
-    EMPTY_HASH = {}.freeze
-
     # @private
     @@partitions_count_cache = PartitionsCountCache.new
 
@@ -17,10 +14,9 @@ module Rdkafka
     # then. Since the partitions count can only grow and should be same for all consumers and
     # producers, we can use a global cache as long as we ensure that updates only move up.
     #
+    # @return [Rdkafka::Producer::PartitionsCountCache]
     # @note It is critical to remember, that not all users may have statistics callbacks enabled,
     #   hence we should not make assumption that this cache is always updated from the stats.
-    #
-    # @return [Rdkafka::Producer::PartitionsCountCache]
     def self.partitions_count_cache
       @@partitions_count_cache
     end
@@ -30,6 +26,9 @@ module Rdkafka
       @@partitions_count_cache = partitions_count_cache
     end
 
+    # Empty hash used as a default
+    EMPTY_HASH = {}.freeze
+
     private_constant :EMPTY_HASH
 
     # Raised when there was a critical issue when invoking rd_kafka_topic_new
@@ -64,12 +63,13 @@ module Rdkafka
     end
 
     # Sets alternative set of configuration details that can be set per topic
-    # @note It is not allowed to re-set the same topic config twice because of the underlying
-    #   librdkafka caching
+    #
     # @param topic [String] The topic name
     # @param config [Hash] config we want to use per topic basis
     # @param config_hash [Integer] hash of the config. We expect it here instead of computing it,
     #   because it is already computed during the retrieval attempt in the `#produce` flow.
+    # @note It is not allowed to re-set the same topic config twice because of the underlying
+    #   librdkafka caching
     def set_topic_config(topic, config, config_hash)
       # Ensure lock on topic reference just in case
       @native_kafka.with_inner do |inner|
@@ -125,8 +125,7 @@ module Rdkafka
     # Set a callback that will be called every time a message is successfully produced.
     # The callback is called with a {DeliveryReport} and {DeliveryHandle}
     #
-    # @param callback [Proc, #call] The callback
-    #
+    # @param callback [Proc, #call] callable object to handle delivery reports
     # @return [nil]
     def delivery_callback=(callback)
       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
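A short usage sketch for the setter documented above; the single-argument form is shown, and per the docs the callback may also receive the `DeliveryHandle` as a second argument:

```ruby
producer.delivery_callback = lambda do |report|
  # report is a Rdkafka::Producer::DeliveryReport
  puts "Delivered to #{report.topic_name}[#{report.partition}] @ #{report.offset}"
end
```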
@@ -168,7 +167,7 @@ module Rdkafka
     # should be no other errors.
     #
     # @note For `timed_out` we do not raise an error to keep it backwards compatible
-    def flush(timeout_ms=5_000)
+    def flush(timeout_ms = Defaults::PRODUCER_FLUSH_TIMEOUT_MS)
       closed_producer_check(__method__)
 
       code = nil
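The hardcoded timeout moves into the new `Defaults` module (`data/lib/rdkafka/defaults.rb`, added in this release); presumably the constant preserves the old value, so existing callers behave the same:

```ruby
producer.flush            # uses Defaults::PRODUCER_FLUSH_TIMEOUT_MS (previously a literal 5_000 ms)
producer.flush(30_000)    # an explicit timeout in milliseconds still works
```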
@@ -208,11 +207,37 @@ module Rdkafka
       code.zero? || raise(Rdkafka::RdkafkaError.new(code))
 
       # Wait for the purge to affect everything
-      sleep(0.001) until flush(100)
+      sleep(Defaults::PRODUCER_PURGE_SLEEP_INTERVAL_MS / 1000.0) until flush(Defaults::PRODUCER_PURGE_FLUSH_TIMEOUT_MS)
 
       true
     end
 
+    # Returns the number of messages and requests waiting to be sent to the broker as well as
+    # delivery reports queued for the application.
+    #
+    # This provides visibility into the producer's internal queue depth, useful for:
+    # - Monitoring producer backpressure
+    # - Implementing custom flow control
+    # - Debugging message delivery issues
+    # - Graceful shutdown logic (wait until queue is empty)
+    #
+    # @return [Integer] the number of messages in the queue
+    # @raise [Rdkafka::ClosedProducerError] if called on a closed producer
+    #
+    # @note This method is thread-safe as it uses the @native_kafka.with_inner synchronization
+    #
+    # @example
+    #   producer.queue_size #=> 42
+    def queue_size
+      closed_producer_check(__method__)
+
+      @native_kafka.with_inner do |inner|
+        Rdkafka::Bindings.rd_kafka_outq_len(inner)
+      end
+    end
+
+    alias queue_length queue_size
+
     # Partition count for a given topic.
     #
     # @param topic [String] The topic name.
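A sketch of the new `#queue_size` / `#queue_length` API covering the graceful-shutdown case listed in its docs; the broker address and topic are placeholders:

```ruby
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer
producer.produce(topic: "events", payload: "ping")

# Drain librdkafka's outbound queue (messages, in-flight requests and pending
# delivery reports) before closing.
sleep(0.05) until producer.queue_size.zero?
producer.close
```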
@@ -237,13 +262,13 @@ module Rdkafka
           topic_metadata = ::Rdkafka::Metadata.new(inner, topic).topics&.first
         end
 
-        topic_metadata ? topic_metadata[:partition_count] : -1
+        topic_metadata ? topic_metadata[:partition_count] : Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
       end
     rescue Rdkafka::RdkafkaError => e
       # If the topic does not exist, it will be created or if not allowed another error will be
-      # raised. We here return -1 so this can happen without early error happening on metadata
-      # discovery.
-      return -1 if e.code == :unknown_topic_or_part
+      # raised. We here return RD_KAFKA_PARTITION_UA so this can happen without early error
+      # happening on metadata discovery.
+      return Rdkafka::Bindings::RD_KAFKA_PARTITION_UA if e.code == :unknown_topic_or_part
 
       raise(e)
     end
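`RD_KAFKA_PARTITION_UA` is librdkafka's "unassigned partition" sentinel and equals -1, so callers that previously compared against the literal keep working; the named constant just reads better:

```ruby
count = producer.partition_count("maybe-missing-topic")

if count == Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
  # metadata was unavailable, e.g. the topic has not been created yet
else
  # count is the actual partition count
end
```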
@@ -254,14 +279,15 @@ module Rdkafka
     # When a timestamp is provided this is used instead of the auto-generated timestamp.
     #
     # @param topic [String] The topic to produce to
-    # @param payload [String,nil] The message's payload
-    # @param key [String, nil] The message's key
-    # @param partition [Integer,nil] Optional partition to produce to
+    # @param payload [String, nil]
+    # @param key [String, nil]
+    # @param partition [Integer, nil] Optional partition to produce to
     # @param partition_key [String, nil] Optional partition key based on which partition assignment can happen
-    # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
-    # @param headers [Hash<String,String|Array<String>>] Optional message headers. Values can be either a single string or an array of strings to support duplicate headers per KIP-82
+    # @param timestamp [Time, Integer, nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
+    # @param headers [Hash{String => String, Array<String>}] Optional message headers. Values can be either a single string or an array of strings to support duplicate headers per KIP-82
     # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
     # @param topic_config [Hash] topic config for given message dispatch. Allows to send messages to topics with different configuration
+    # @param partitioner [String] name of the partitioner to use
     #
     # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
     #
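An illustrative call exercising the parameters documented above, including KIP-82 style array header values and the `partitioner` argument; topic, values, and the partitioner name are placeholders chosen for the example:

```ruby
handle = producer.produce(
  topic: "events",
  payload: "body",
  key: "user-42",
  headers: { "trace-id" => ["abc", "def"] }, # duplicate headers per KIP-82
  label: :important,
  partitioner: "murmur2"                     # assumed partitioner name
)

report = handle.wait(max_wait_timeout: 5)
report.label #=> :important
```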
@@ -318,9 +344,9 @@ module Rdkafka
           selected_partitioner) if partition_count.positive?
       end
 
-      # If partition is nil, use -1 to let librdafka set the partition randomly or
+      # If partition is nil, use RD_KAFKA_PARTITION_UA to let librdafka set the partition randomly or
       # based on the key when present.
-      partition ||= -1
+      partition ||= Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
 
       # If timestamp is nil use 0 and let Kafka set one. If an integer or time
       # use it.
@@ -338,9 +364,9 @@ module Rdkafka
       delivery_handle.label = label
       delivery_handle.topic = topic
       delivery_handle[:pending] = true
-      delivery_handle[:response] = -1
-      delivery_handle[:partition] = -1
-      delivery_handle[:offset] = -1
+      delivery_handle[:response] = Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
+      delivery_handle[:partition] = Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
+      delivery_handle[:offset] = Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
       DeliveryHandle.register(delivery_handle)
 
       args = [
@@ -387,7 +413,7 @@ module Rdkafka
       end
 
       # Raise error if the produce call was not successful
-      if response != 0
+      if response != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
         DeliveryHandle.remove(delivery_handle.to_ptr.address)
         raise RdkafkaError.new(response)
       end
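Behavior is unchanged by the switch to the named constant (`RD_KAFKA_RESP_ERR_NO_ERROR` is 0): a failed enqueue still unregisters the handle and raises. From the caller's side:

```ruby
begin
  producer.produce(topic: "events", payload: "x")
rescue Rdkafka::RdkafkaError => e
  e.code #=> a symbol such as :queue_full when the local queue is saturated
end
```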
data/lib/rdkafka/version.rb CHANGED
@@ -1,7 +1,10 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.22.2"
-  LIBRDKAFKA_VERSION = "2.8.0"
-  LIBRDKAFKA_SOURCE_SHA256 = "5bd1c46f63265f31c6bfcedcde78703f77d28238eadf23821c2b43fc30be3e25"
+  # Current rdkafka-ruby gem version
+  VERSION = "0.25.0"
+  # Target librdkafka version to be used
+  LIBRDKAFKA_VERSION = "2.12.1"
+  # SHA256 hash of the librdkafka source tarball for verification
+  LIBRDKAFKA_SOURCE_SHA256 = "ec103fa05cb0f251e375f6ea0b6112cfc9d0acd977dc5b69fdc54242ba38a16f"
 end
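The new comment on `LIBRDKAFKA_SOURCE_SHA256` states its purpose; a sketch of how such a digest can be checked against a downloaded tarball, where the path is an assumption based on the `dist/` layout in the file list:

```ruby
require "digest"

tarball = "dist/librdkafka-#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz" # assumed location
digest = Digest::SHA256.file(tarball).hexdigest
raise "librdkafka tarball checksum mismatch" unless digest == Rdkafka::LIBRDKAFKA_SOURCE_SHA256
```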
data/lib/rdkafka.rb CHANGED
@@ -8,6 +8,7 @@ require "json"
 require "rdkafka/version"
 require "rdkafka/helpers/time"
 require "rdkafka/helpers/oauth"
+require "rdkafka/defaults"
 require "rdkafka/abstract_handle"
 require "rdkafka/admin"
 require "rdkafka/admin/create_topic_handle"
data/rdkafka.gemspec CHANGED
@@ -13,17 +13,31 @@ Gem::Specification.new do |gem|
   gem.name = 'rdkafka'
   gem.require_paths = ['lib']
   gem.version = Rdkafka::VERSION
-  gem.required_ruby_version = '>= 3.1'
+  gem.required_ruby_version = '>= 3.2'
+
+  files = `git ls-files`.split($\)
+  files = files.reject do |file|
+    next true if file.start_with?('.')
+    next true if file.start_with?('spec/')
+    next true if file.start_with?('ext/README.md')
+
+    false
+  end
 
   if ENV['RUBY_PLATFORM']
     gem.platform = ENV['RUBY_PLATFORM']
-    gem.files = `git ls-files`.split($\)
 
     # Do not include the source code for librdkafka as it should be precompiled already per
     # platform. Same applies to any possible patches.
-    gem.files = gem.files.reject do |file|
-      file.match?(%r{^dist/librdkafka-.*\.tar\.gz$}) ||
-        file.match?(%r{^dist/patches/})
+    # Do not include github actions details in RubyGems releases
+    gem.files = files.reject do |file|
+      next true if file.start_with?('dist/')
+      next true if file.start_with?('ext/build_')
+      next true if file.start_with?('ext/ci_')
+      next true if file.start_with?('ext/Rakefile')
+      next true if file.start_with?('ext/generate-')
+
+      false
     end
 
     # Add the compiled extensions that exist (not in git)
@@ -36,21 +50,29 @@ Gem::Specification.new do |gem|
     end
   else
     gem.platform = Gem::Platform::RUBY
-    gem.files = `git ls-files`.split($\)
+
+    # Do not include code used for building native extensions
+    # Do not include github actions details in RubyGems releases
+    gem.files = files.reject do |file|
+      next true if file.start_with?('ext/build_')
+      next true if file.start_with?('ext/ci_')
+      next true if file.start_with?('ext/generate-')
+      next false unless file.start_with?('dist/')
+      next false if file.start_with?('dist/patches')
+      next false if file.start_with?('dist/librdkafka-')
+
+      true
+    end
+
     gem.extensions = %w(ext/Rakefile)
   end
 
-  gem.add_dependency 'ffi', '~> 1.15'
+  gem.add_dependency 'ffi', '~> 1.17.1'
+  gem.add_dependency 'json', '> 2.0'
   gem.add_dependency 'logger'
   gem.add_dependency 'mini_portile2', '~> 2.6'
   gem.add_dependency 'rake', '> 12'
 
-  gem.add_development_dependency 'ostruct'
-  gem.add_development_dependency 'pry'
-  gem.add_development_dependency 'rspec', '~> 3.5'
-  gem.add_development_dependency 'rake'
-  gem.add_development_dependency 'simplecov'
-
   gem.metadata = {
     'funding_uri' => 'https://karafka.io/#become-pro',
     'homepage_uri' => 'https://karafka.io',
data/renovate.json CHANGED
@@ -3,23 +3,16 @@
   "extends": [
     "config:recommended"
   ],
+  "minimumReleaseAge": "7 days",
   "github-actions": {
     "enabled": true,
     "pinDigests": true
   },
-  "packageRules": [
-    {
-      "matchManagers": [
-        "github-actions"
-      ],
-      "minimumReleaseAge": "7 days"
-    }
-  ],
   "customManagers": [
     {
       "customType": "regex",
       "managerFilePatterns": [
-        "/^ext/build_common\\.sh$/"
+        "/ext/build_common.sh/"
       ],
       "matchStrings": [
         "readonly OPENSSL_VERSION=\"(?<currentValue>.*)\""
@@ -31,7 +24,7 @@
     {
       "customType": "regex",
       "managerFilePatterns": [
-        "/^ext/build_common\\.sh$/"
+        "/ext/build_common.sh/"
       ],
       "matchStrings": [
         "readonly CYRUS_SASL_VERSION=\"(?<currentValue>.*)\""
@@ -43,7 +36,7 @@
     {
       "customType": "regex",
       "managerFilePatterns": [
-        "/^ext/build_common\\.sh$/"
+        "/ext/build_common.sh/"
       ],
       "matchStrings": [
         "readonly ZLIB_VERSION=\"(?<currentValue>.*)\""
@@ -55,7 +48,7 @@
     {
       "customType": "regex",
       "managerFilePatterns": [
-        "/^ext/build_common\\.sh$/"
+        "/ext/build_common.sh/"
      ],
       "matchStrings": [
         "readonly ZSTD_VERSION=\"(?<currentValue>.*)\""
@@ -67,7 +60,7 @@
     {
       "customType": "regex",
       "managerFilePatterns": [
-        "/^ext/build_common\\.sh$/"
+        "/ext/build_common.sh/"
       ],
       "matchStrings": [
         "readonly KRB5_VERSION=\"(?<currentValue>.*)\""
@@ -75,18 +68,6 @@
       "depNameTemplate": "krb5/krb5",
       "datasourceTemplate": "github-releases",
       "extractVersionTemplate": "^krb5-(?<version>.*)$"
-    },
-    {
-      "customType": "regex",
-      "managerFilePatterns": [
-        "/^ext/build_common\\.sh$/"
-      ],
-      "matchStrings": [
-        "readonly LIBRDKAFKA_VERSION=\"(?<currentValue>.*)\""
-      ],
-      "depNameTemplate": "confluentinc/librdkafka",
-      "datasourceTemplate": "github-releases",
-      "extractVersionTemplate": "^v(?<version>.*)$"
     }
   ]
 }