karafka-rdkafka 0.21.0.rc1-arm64-darwin → 0.22.0-arm64-darwin

This diff shows the content changes between package versions that have been publicly released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
Files changed (60)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +15 -2
  3. data/README.md +36 -135
  4. data/docker-compose-ssl.yml +35 -0
  5. data/ext/librdkafka.dylib +0 -0
  6. data/karafka-rdkafka.gemspec +35 -6
  7. data/lib/rdkafka/bindings.rb +0 -1
  8. data/lib/rdkafka/consumer.rb +1 -1
  9. data/lib/rdkafka/version.rb +3 -3
  10. data/renovate.json +5 -17
  11. metadata +32 -52
  12. data/.github/CODEOWNERS +0 -3
  13. data/.github/FUNDING.yml +0 -1
  14. data/.github/workflows/ci_linux_x86_64_gnu.yml +0 -271
  15. data/.github/workflows/ci_linux_x86_64_musl.yml +0 -194
  16. data/.github/workflows/ci_macos_arm64.yml +0 -284
  17. data/.github/workflows/push_linux_x86_64_gnu.yml +0 -65
  18. data/.github/workflows/push_linux_x86_64_musl.yml +0 -79
  19. data/.github/workflows/push_macos_arm64.yml +0 -54
  20. data/.github/workflows/push_ruby.yml +0 -37
  21. data/.github/workflows/verify-action-pins.yml +0 -16
  22. data/.gitignore +0 -15
  23. data/.rspec +0 -2
  24. data/.ruby-gemset +0 -1
  25. data/.ruby-version +0 -1
  26. data/.yardopts +0 -2
  27. data/ext/README.md +0 -19
  28. data/ext/Rakefile +0 -131
  29. data/ext/build_common.sh +0 -361
  30. data/ext/build_linux_x86_64_gnu.sh +0 -306
  31. data/ext/build_linux_x86_64_musl.sh +0 -763
  32. data/ext/build_macos_arm64.sh +0 -550
  33. data/spec/rdkafka/abstract_handle_spec.rb +0 -117
  34. data/spec/rdkafka/admin/create_acl_handle_spec.rb +0 -56
  35. data/spec/rdkafka/admin/create_acl_report_spec.rb +0 -18
  36. data/spec/rdkafka/admin/create_topic_handle_spec.rb +0 -54
  37. data/spec/rdkafka/admin/create_topic_report_spec.rb +0 -16
  38. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +0 -85
  39. data/spec/rdkafka/admin/delete_acl_report_spec.rb +0 -72
  40. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +0 -54
  41. data/spec/rdkafka/admin/delete_topic_report_spec.rb +0 -16
  42. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +0 -85
  43. data/spec/rdkafka/admin/describe_acl_report_spec.rb +0 -73
  44. data/spec/rdkafka/admin_spec.rb +0 -970
  45. data/spec/rdkafka/bindings_spec.rb +0 -198
  46. data/spec/rdkafka/callbacks_spec.rb +0 -20
  47. data/spec/rdkafka/config_spec.rb +0 -258
  48. data/spec/rdkafka/consumer/headers_spec.rb +0 -73
  49. data/spec/rdkafka/consumer/message_spec.rb +0 -139
  50. data/spec/rdkafka/consumer/partition_spec.rb +0 -57
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +0 -248
  52. data/spec/rdkafka/consumer_spec.rb +0 -1296
  53. data/spec/rdkafka/error_spec.rb +0 -95
  54. data/spec/rdkafka/metadata_spec.rb +0 -79
  55. data/spec/rdkafka/native_kafka_spec.rb +0 -130
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +0 -60
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +0 -25
  58. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +0 -359
  59. data/spec/rdkafka/producer_spec.rb +0 -1526
  60. data/spec/spec_helper.rb +0 -193
data/spec/spec_helper.rb DELETED
@@ -1,193 +0,0 @@
- # frozen_string_literal: true
-
- unless ENV["CI"] == "true"
-   require "simplecov"
-   SimpleCov.start do
-     add_filter "/spec/"
-   end
- end
-
- require "pry"
- require "rspec"
- require "rdkafka"
- require "timeout"
- require "securerandom"
-
- def rdkafka_base_config
-   {
-     :"bootstrap.servers" => "localhost:9092",
-     # Display statistics and refresh often just to cover those in specs
-     :'statistics.interval.ms' => 1_000,
-     :'topic.metadata.refresh.interval.ms' => 1_000
-   }
- end
-
- def rdkafka_config(config_overrides={})
-   # Generate the base config
-   config = rdkafka_base_config
-   # Merge overrides
-   config.merge!(config_overrides)
-   # Return it
-   Rdkafka::Config.new(config)
- end
-
- def rdkafka_consumer_config(config_overrides={})
-   # Generate the base config
-   config = rdkafka_base_config
-   # Add consumer specific fields to it
-   config[:"auto.offset.reset"] = "earliest"
-   config[:"enable.partition.eof"] = false
-   config[:"group.id"] = "ruby-test-#{SecureRandom.uuid}"
-   # Enable debug mode if required
-   if ENV["DEBUG_CONSUMER"]
-     config[:debug] = "cgrp,topic,fetch"
-   end
-   # Merge overrides
-   config.merge!(config_overrides)
-   # Return it
-   Rdkafka::Config.new(config)
- end
-
- def rdkafka_producer_config(config_overrides={})
-   # Generate the base config
-   config = rdkafka_base_config
-   # Enable debug mode if required
-   if ENV["DEBUG_PRODUCER"]
-     config[:debug] = "broker,topic,msg"
-   end
-   # Merge overrides
-   config.merge!(config_overrides)
-   # Return it
-   Rdkafka::Config.new(config)
- end
-
- def new_native_client
-   config = rdkafka_consumer_config
-   config.send(:native_kafka, config.send(:native_config), :rd_kafka_producer)
- end
-
- def new_native_topic(topic_name="topic_name", native_client: )
-   Rdkafka::Bindings.rd_kafka_topic_new(
-     native_client,
-     topic_name,
-     nil
-   )
- end
-
- def wait_for_message(topic:, delivery_report:, timeout_in_seconds: 30, consumer: nil)
-   new_consumer = consumer.nil?
-   consumer ||= rdkafka_consumer_config('allow.auto.create.topics': true).consumer
-   consumer.subscribe(topic)
-   timeout = Time.now.to_i + timeout_in_seconds
-   retry_count = 0
-   max_retries = 10
-
-   loop do
-     if timeout <= Time.now.to_i
-       raise "Timeout of #{timeout_in_seconds} seconds reached in wait_for_message"
-     end
-
-     begin
-       message = consumer.poll(100)
-       if message &&
-           message.partition == delivery_report.partition &&
-           message.offset == delivery_report.offset
-         return message
-       end
-     rescue Rdkafka::RdkafkaError => e
-       if e.code == :unknown_topic_or_part && retry_count < max_retries
-         retry_count += 1
-         sleep(0.1) # Small delay before retry
-         next
-       else
-         raise
-       end
-     end
-   end
- ensure
-   consumer.close if new_consumer
- end
-
- def wait_for_assignment(consumer)
-   10.times do
-     break if !consumer.assignment.empty?
-     sleep 1
-   end
- end
-
- def wait_for_unassignment(consumer)
-   10.times do
-     break if consumer.assignment.empty?
-     sleep 1
-   end
- end
-
- def notify_listener(listener, &block)
-   # 1. subscribe and poll
-   consumer.subscribe("consume_test_topic")
-   wait_for_assignment(consumer)
-   consumer.poll(100)
-
-   block.call if block
-
-   # 2. unsubscribe
-   consumer.unsubscribe
-   wait_for_unassignment(consumer)
-   consumer.close
- end
-
- RSpec.configure do |config|
-   config.filter_run focus: true
-   config.run_all_when_everything_filtered = true
-
-   config.before(:each) do
-     Rdkafka::Config.statistics_callback = nil
-     # We need to clear it so state does not leak between specs
-     Rdkafka::Producer.partitions_count_cache.to_h.clear
-   end
-
-   config.before(:suite) do
-     admin = rdkafka_config.admin
-     {
-       consume_test_topic: 3,
-       empty_test_topic: 3,
-       load_test_topic: 3,
-       produce_test_topic: 3,
-       rake_test_topic: 3,
-       watermarks_test_topic: 3,
-       partitioner_test_topic: 25,
-       example_topic: 1
-     }.each do |topic, partitions|
-       create_topic_handle = admin.create_topic(topic.to_s, partitions, 1)
-       begin
-         create_topic_handle.wait(max_wait_timeout: 1.0)
-       rescue Rdkafka::RdkafkaError => ex
-         raise unless ex.message.match?(/topic_already_exists/)
-       end
-     end
-     admin.close
-   end
-
-   config.around(:each) do |example|
-     # Timeout specs after a minute. If they take longer
-     # they are probably stuck
-     Timeout::timeout(60) do
-       example.run
-     end
-   end
- end
-
- class RdKafkaTestConsumer
-   def self.with
-     consumer = Rdkafka::Bindings.rd_kafka_new(
-       :rd_kafka_consumer,
-       nil,
-       nil,
-       0
-     )
-     yield consumer
-   ensure
-     Rdkafka::Bindings.rd_kafka_consumer_close(consumer)
-     Rdkafka::Bindings.rd_kafka_destroy(consumer)
-   end
- end