karafka-rdkafka 0.21.0-aarch64-linux-gnu

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (119) hide show
  1. checksums.yaml +7 -0
  2. data/.github/CODEOWNERS +3 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci_linux_alpine_x86_64_musl.yml +197 -0
  5. data/.github/workflows/ci_linux_alpine_x86_64_musl_complementary.yml +264 -0
  6. data/.github/workflows/ci_linux_debian_x86_64_gnu.yml +271 -0
  7. data/.github/workflows/ci_linux_debian_x86_64_gnu_complementary.yml +334 -0
  8. data/.github/workflows/ci_linux_ubuntu_aarch64_gnu.yml +271 -0
  9. data/.github/workflows/ci_linux_ubuntu_aarch64_gnu_complementary.yml +295 -0
  10. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +281 -0
  11. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu_complementary.yml +294 -0
  12. data/.github/workflows/ci_macos_arm64.yml +284 -0
  13. data/.github/workflows/push_linux_aarch64_gnu.yml +65 -0
  14. data/.github/workflows/push_linux_x86_64_gnu.yml +65 -0
  15. data/.github/workflows/push_linux_x86_64_musl.yml +79 -0
  16. data/.github/workflows/push_macos_arm64.yml +54 -0
  17. data/.github/workflows/push_ruby.yml +37 -0
  18. data/.github/workflows/trigger-wiki-refresh.yml +30 -0
  19. data/.github/workflows/verify-action-pins.yml +16 -0
  20. data/.gitignore +16 -0
  21. data/.rspec +3 -0
  22. data/.ruby-gemset +1 -0
  23. data/.ruby-version +1 -0
  24. data/.yardopts +2 -0
  25. data/CHANGELOG.md +344 -0
  26. data/Gemfile +5 -0
  27. data/MIT-LICENSE +22 -0
  28. data/README.md +78 -0
  29. data/Rakefile +96 -0
  30. data/dist/cyrus-sasl-2.1.28.tar.gz +0 -0
  31. data/dist/krb5-1.21.3.tar.gz +0 -0
  32. data/dist/openssl-3.0.16.tar.gz +0 -0
  33. data/dist/zlib-1.3.1.tar.gz +0 -0
  34. data/dist/zstd-1.5.7.tar.gz +0 -0
  35. data/docker-compose-ssl.yml +35 -0
  36. data/docker-compose.yml +25 -0
  37. data/ext/README.md +19 -0
  38. data/ext/Rakefile +131 -0
  39. data/ext/build_common.sh +376 -0
  40. data/ext/build_linux_aarch64_gnu.sh +326 -0
  41. data/ext/build_linux_x86_64_gnu.sh +317 -0
  42. data/ext/build_linux_x86_64_musl.sh +773 -0
  43. data/ext/build_macos_arm64.sh +557 -0
  44. data/ext/generate-ssl-certs.sh +109 -0
  45. data/ext/librdkafka.so +0 -0
  46. data/karafka-rdkafka.gemspec +65 -0
  47. data/lib/rdkafka/abstract_handle.rb +116 -0
  48. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  49. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  50. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  51. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  52. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  53. data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
  54. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  55. data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
  56. data/lib/rdkafka/admin/create_topic_report.rb +24 -0
  57. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  58. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  59. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  60. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  61. data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
  62. data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
  63. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  64. data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
  65. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  66. data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
  67. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  68. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
  69. data/lib/rdkafka/admin.rb +832 -0
  70. data/lib/rdkafka/bindings.rb +583 -0
  71. data/lib/rdkafka/callbacks.rb +415 -0
  72. data/lib/rdkafka/config.rb +395 -0
  73. data/lib/rdkafka/consumer/headers.rb +79 -0
  74. data/lib/rdkafka/consumer/message.rb +86 -0
  75. data/lib/rdkafka/consumer/partition.rb +57 -0
  76. data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
  77. data/lib/rdkafka/consumer.rb +663 -0
  78. data/lib/rdkafka/error.rb +201 -0
  79. data/lib/rdkafka/helpers/oauth.rb +58 -0
  80. data/lib/rdkafka/helpers/time.rb +14 -0
  81. data/lib/rdkafka/metadata.rb +115 -0
  82. data/lib/rdkafka/native_kafka.rb +139 -0
  83. data/lib/rdkafka/producer/delivery_handle.rb +48 -0
  84. data/lib/rdkafka/producer/delivery_report.rb +45 -0
  85. data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
  86. data/lib/rdkafka/producer.rb +497 -0
  87. data/lib/rdkafka/version.rb +7 -0
  88. data/lib/rdkafka.rb +54 -0
  89. data/renovate.json +92 -0
  90. data/spec/integrations/ssl_stress_spec.rb +121 -0
  91. data/spec/lib/rdkafka/abstract_handle_spec.rb +117 -0
  92. data/spec/lib/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  93. data/spec/lib/rdkafka/admin/create_acl_report_spec.rb +18 -0
  94. data/spec/lib/rdkafka/admin/create_topic_handle_spec.rb +54 -0
  95. data/spec/lib/rdkafka/admin/create_topic_report_spec.rb +16 -0
  96. data/spec/lib/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  97. data/spec/lib/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  98. data/spec/lib/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
  99. data/spec/lib/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  100. data/spec/lib/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  101. data/spec/lib/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  102. data/spec/lib/rdkafka/admin_spec.rb +982 -0
  103. data/spec/lib/rdkafka/bindings_spec.rb +198 -0
  104. data/spec/lib/rdkafka/callbacks_spec.rb +20 -0
  105. data/spec/lib/rdkafka/config_spec.rb +258 -0
  106. data/spec/lib/rdkafka/consumer/headers_spec.rb +73 -0
  107. data/spec/lib/rdkafka/consumer/message_spec.rb +139 -0
  108. data/spec/lib/rdkafka/consumer/partition_spec.rb +57 -0
  109. data/spec/lib/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
  110. data/spec/lib/rdkafka/consumer_spec.rb +1343 -0
  111. data/spec/lib/rdkafka/error_spec.rb +95 -0
  112. data/spec/lib/rdkafka/metadata_spec.rb +79 -0
  113. data/spec/lib/rdkafka/native_kafka_spec.rb +130 -0
  114. data/spec/lib/rdkafka/producer/delivery_handle_spec.rb +60 -0
  115. data/spec/lib/rdkafka/producer/delivery_report_spec.rb +25 -0
  116. data/spec/lib/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
  117. data/spec/lib/rdkafka/producer_spec.rb +1527 -0
  118. data/spec/spec_helper.rb +230 -0
  119. metadata +320 -0
data/ext/Rakefile ADDED
@@ -0,0 +1,131 @@
# frozen_string_literal: true

# Build script for the librdkafka native extension shipped with karafka-rdkafka.
# Loads the pinned librdkafka version/checksum constants plus the stdlib
# helpers used by the tasks below.
#
# FIX: replaced the legacy `require File.expand_path('../..', __FILE__)` idiom
# with `require_relative`, which resolves the same path without string games.
require_relative '../lib/rdkafka/version'

require "digest"
require "fileutils"
require "open-uri"
# Default task: produce ext/librdkafka.{so,dylib}, either by compiling the
# vendored tarball via mini_portile2 or (for nix users) by copying prebuilt
# libraries from RDKAFKA_EXT_PATH.
task :default => :clean do
  # For nix users, nix can't locate the file paths because the packages it's
  # requiring aren't managed by the system but are managed by nix itself, so
  # using the normal file paths doesn't work for nix users.
  #
  # Mini_portile causes an issue because its dependencies are downloaded on the
  # fly and therefore don't exist/aren't accessible in the nix environment.
  if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
    # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
    require "mini_portile2"
    recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)

    # Use default homebrew openssl if we're on mac and the directory exists,
    # is not using nix-prepared libraries and each of the flags is not already set.
    #
    # FIX: the brew probe used `which brew &> /dev/null`; `&>` is a bashism that
    # dash (/bin/sh on Debian) parses as "background + redirect", making the
    # check succeed unconditionally. POSIX redirection + `command -v` is portable.
    if recipe.host&.include?("darwin") && system("command -v brew > /dev/null 2>&1") && Dir.exist?("#{homebrew_prefix = %x(brew --prefix openssl).strip}") && !ENV.key?("NIX_LDFLAGS")
      ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV.key?("CPPFLAGS")
      ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV.key?("LDFLAGS")
    end

    releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))

    recipe.files << {
      :url => "file://#{releases}/librdkafka-#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
      :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
    }
    recipe.configure_options = ["--host=#{recipe.host}"]

    # Ruby-specific patches shipped with the gem, applied in lexical order
    recipe.patch_files = Dir[File.join(releases, 'patches', "*.patch")].sort

    # Disable using libc regex engine in favor of the embedded one
    # The default regex engine of librdkafka does not always work exactly as most of the users
    # would expect, hence this flag allows for changing it to the other one
    if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
      recipe.configure_options << '--disable-regex-ext'
    end

    recipe.cook
    # Move dynamic library we're interested in
    if recipe.host.include?('darwin')
      from_extension = '1.dylib'
      to_extension = 'dylib'
    else
      from_extension = 'so.1'
      to_extension = 'so'
    end
    lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
    FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))
    # Cleanup files created by miniportile we don't need in the gem
    FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
    FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
  else
    # Otherwise, copy existing libraries to ./ext
    # NOTE(review): this branch is only reached when RDKAFKA_EXT_PATH is
    # non-empty, so the guard below can never fire; it is kept purely as a
    # defensive assertion for future refactors.
    if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
      raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
    end
    files = [
      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
    ]
    files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
  end
end
# Remove every artifact a previous build may have left in ext/: the built
# shared libraries plus mini_portile's "ports" and "tmp" working directories.
task :clean do
  here = File.dirname(__FILE__)
  %w[librdkafka.dylib librdkafka.so].each { |lib| FileUtils.rm_f(File.join(here, lib)) }
  %w[ports tmp].each { |dir| FileUtils.rm_rf(File.join(here, dir)) }
end
# Tasks that maintain the vendored librdkafka source tarball under ../dist.
namespace :dist do
  # Resolve the dist directory (overridable via RDKAFKA_DIST_PATH).
  task :dir do
    ENV["RDKAFKA_DIST_PATH"] ||= File.expand_path(File.join(File.dirname(__FILE__), '..', 'dist'))
  end

  # Resolve the full path of the tarball expected for the pinned version.
  task :file => "dist:dir" do
    ENV["RDKAFKA_DIST_FILE"] ||= File.join(ENV["RDKAFKA_DIST_PATH"], "librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz")
  end

  # Drop everything from dist/ except the current tarball.
  task :clean => "dist:file" do
    keep = ENV["RDKAFKA_DIST_FILE"]

    Dir.glob(File.join("#{ENV['RDKAFKA_DIST_PATH']}", "*"))
       .reject { |path| path.include?(keep) }
       .each { |path| FileUtils.rm_rf(path) }
  end

  # Fetch the pinned librdkafka release and verify it against the recorded
  # SHA256 before anything is written to disk.
  task :download => "dist:file" do
    url = "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{Rdkafka::LIBRDKAFKA_VERSION}"

    URI.open(url) do |remote|
      payload = remote.read

      unless Digest::SHA256.hexdigest(payload) == Rdkafka::LIBRDKAFKA_SOURCE_SHA256
        raise "SHA256 does not match downloaded file"
      end

      File.write(ENV["RDKAFKA_DIST_FILE"], payload)
    end
  end

  # Refresh the tarball and prune stale files in one go.
  task :update => %w[dist:download dist:clean]
end
# Ad-hoc builds of librdkafka straight from a git ref (sha or tag).
namespace :build do
  desc "Build librdkafka at the given git sha or tag"
  task :git, [:ref] do |task, args|
    # Loaded lazily (mirroring the :default task) so `rake -T` still works
    # in environments where the gem is not installed.
    require "mini_portile2"

    ref = args[:ref]
    version = "git-#{ref}"

    # FIX: `releases` was an undefined local in this task (it only existed
    # inside the :default task's block), so every invocation raised NameError
    # before reaching the build. Compute the dist path here as well.
    releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))

    recipe = MiniPortile.new("librdkafka", version)
    recipe.files << "https://github.com/confluentinc/librdkafka/archive/#{ref}.tar.gz"
    recipe.configure_options = ["--host=#{recipe.host}", "--enable-static", "--enable-zstd"]
    recipe.patch_files = Dir[File.join(releases, 'patches', "*.patch")].sort
    recipe.cook

    ext = recipe.host.include?("darwin") ? "dylib" : "so"
    lib = File.expand_path("ports/#{recipe.host}/librdkafka/#{version}/lib/librdkafka.#{ext}", __dir__)

    # Copy will copy the content, following any symlinks
    FileUtils.cp(lib, __dir__)
  end
end
@@ -0,0 +1,376 @@
#!/usr/bin/env bash
#
# Common functions and constants for librdkafka builds
# This file should be sourced by platform-specific build scripts
#
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh"
#

# Prevent multiple sourcing
if [[ "${BUILD_COMMON_SOURCED:-}" == "1" ]]; then
  return 0
fi

BUILD_COMMON_SOURCED=1

# Version constants - update these to upgrade dependencies
readonly OPENSSL_VERSION="3.0.16"
readonly CYRUS_SASL_VERSION="2.1.28"
readonly ZLIB_VERSION="1.3.1"
readonly ZSTD_VERSION="1.5.7"
readonly KRB5_VERSION="1.21.3"
readonly LIBRDKAFKA_VERSION="2.11.0"

# SHA256 checksums for supply chain security
# Update these when upgrading versions
# FIX: the zlib key previously hard-coded "zlib-1.3.1.tar.gz" instead of
# deriving it from ${ZLIB_VERSION}; bumping the version constant would have
# silently orphaned its checksum entry.
declare -A CHECKSUMS=(
  ["openssl-${OPENSSL_VERSION}.tar.gz"]="57e03c50feab5d31b152af2b764f10379aecd8ee92f16c985983ce4a99f7ef86"
  ["cyrus-sasl-${CYRUS_SASL_VERSION}.tar.gz"]="7ccfc6abd01ed67c1a0924b353e526f1b766b21f42d4562ee635a8ebfc5bb38c"
  ["zlib-${ZLIB_VERSION}.tar.gz"]="9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23"
  ["zstd-${ZSTD_VERSION}.tar.gz"]="eb33e51f49a15e023950cd7825ca74a4a2b43db8354825ac24fc1b7ee09e6fa3"
  ["krb5-${KRB5_VERSION}.tar.gz"]="b7a4cd5ead67fb08b980b21abd150ff7217e85ea320c9ed0c6dadd304840ad35"
  ["librdkafka-${LIBRDKAFKA_VERSION}.tar.gz"]="592a823dc7c09ad4ded1bc8f700da6d4e0c88ffaf267815c6f25e7450b9395ca"
)
# ANSI colors consumed by the logging helpers below
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color

# Timestamped informational message (green).
log() {
  printf '%b\n' "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')] $1${NC}"
}

# Non-fatal warning (yellow).
warn() {
  printf '%b\n' "${YELLOW}[WARNING] $1${NC}"
}

# Fatal error (red); terminates the calling script with status 1.
error() {
  printf '%b\n' "${RED}[ERROR] $1${NC}"
  exit 1
}

# Supply-chain / verification message (blue).
security_log() {
  printf '%b\n' "${BLUE}[SECURITY] $1${NC}"
}
# Function to verify checksums
# Verifies the SHA256 of $1 (relative to the CWD) against the CHECKSUMS table.
# Aborts via error() when the entry is missing or the digest differs.
verify_checksum() {
  local file="$1"
  # FIX: use the ${...:-} form so a missing table entry yields the intended
  # error message instead of an "unbound variable" abort when the sourcing
  # script runs under `set -u`.
  local expected_checksum="${CHECKSUMS[$file]:-}"

  if [ -z "$expected_checksum" ]; then
    error "No checksum defined for $file - this is a security risk!"
  fi

  security_log "Verifying checksum for $file..."
  local actual_checksum

  # Use platform-appropriate checksum command
  if command -v sha256sum &> /dev/null; then
    actual_checksum=$(sha256sum "$file" | cut -d' ' -f1)
  elif command -v shasum &> /dev/null; then
    actual_checksum=$(shasum -a 256 "$file" | cut -d' ' -f1)
  else
    error "No SHA256 checksum utility found (tried sha256sum, shasum)"
  fi

  if [ "$actual_checksum" = "$expected_checksum" ]; then
    security_log "✅ Checksum verified for $file"
    return 0
  else
    error "❌ CHECKSUM MISMATCH for $file!
Expected: $expected_checksum
Actual: $actual_checksum
This could indicate a supply chain attack or corrupted download!"
  fi
}
# Download $1 (url) into $2 (filename) unless a local copy is already
# available, then verify its SHA256 checksum. Prefers wget, falls back to
# curl; aborts when neither exists or the transfer fails.
secure_download() {
  local url="$1"
  local filename="$2"

  # Reuse a file already present in the working directory (a previous run
  # may have downloaded it).
  if [ -f "$filename" ]; then
    log "File $filename already exists, verifying checksum..."
    verify_checksum "$filename"
    return 0
  fi

  # Otherwise prefer the copy shipped in the gem's dist/ directory.
  local script_dir
  script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
  local dist_file="$script_dir/../dist/$filename"

  if [ -f "$dist_file" ]; then
    log "Using distributed $filename from dist/"
    cp "$dist_file" "$filename"
    verify_checksum "$filename"
    return 0
  fi

  log "Downloading $filename from $url..."

  if command -v wget &> /dev/null; then
    # Linux - wget with TLS >= 1.2 enforced
    wget --secure-protocol=TLSv1_2 \
         --https-only \
         --timeout=30 \
         --tries=3 \
         --progress=bar \
         "$url" \
         -O "$filename" || error "Failed to download $filename from $url"
  elif command -v curl &> /dev/null; then
    # macOS/fallback - curl with equivalent security options
    curl -L \
         --tlsv1.2 \
         --connect-timeout 30 \
         --max-time 300 \
         --retry 3 \
         --progress-bar \
         "$url" \
         -o "$filename" || error "Failed to download $filename from $url"
  else
    error "No download utility found (tried wget, curl)"
  fi

  # Verify checksum immediately after download
  verify_checksum "$filename"
}
# Print the CPU count to use for parallel builds; falls back to 4 when
# neither nproc (Linux) nor sysctl (macOS/BSD) is available.
get_cpu_count() {
  if command -v nproc > /dev/null 2>&1; then
    nproc
    return
  fi
  if command -v sysctl > /dev/null 2>&1; then
    sysctl -n hw.ncpu
    return
  fi
  echo "4" # fallback
}
# Echo the path of the vendored librdkafka tarball inside $1, aborting via
# error() when the pinned version's archive is absent.
find_librdkafka_tarball() {
  local dist_dir="$1"
  local tarball="$dist_dir/librdkafka-${LIBRDKAFKA_VERSION}.tar.gz"

  [ -f "$tarball" ] || error "librdkafka-${LIBRDKAFKA_VERSION}.tar.gz not found in $dist_dir"

  echo "$tarball"
}
# Collect *.patch files under $1 (sorted, NUL-safe) into the caller's array
# named by $2, logging what was found.
find_patches() {
  local patches_dir="$1"
  local -n patches_array=$2 # nameref to output array

  patches_array=()

  if [ ! -d "$patches_dir" ]; then
    log "No patches directory found: $patches_dir"
    return
  fi

  while IFS= read -r -d '' patch; do
    patches_array+=("$patch")
  done < <(find "$patches_dir" -name "*.patch" -type f -print0 | sort -z)

  if [ ${#patches_array[@]} -eq 0 ]; then
    log "No patches found in $patches_dir"
    return
  fi

  log "Found ${#patches_array[@]} patches to apply:"
  for patch in "${patches_array[@]}"; do
    log " - $(basename "$patch")"
  done
}
# Apply every patch in the caller's array (named by $1) with `patch -p1`
# from the current directory; aborts via error() on the first failure.
apply_patches() {
  local -n patches_array=$1 # nameref to patches array

  # Nothing to do for an empty patch set.
  [ ${#patches_array[@]} -gt 0 ] || return 0

  log "Applying Ruby-specific patches..."
  local entry
  for entry in "${patches_array[@]}"; do
    log "Applying patch: $(basename "$entry")"
    if patch -p1 < "$entry"; then
      log "✅ Successfully applied $(basename "$entry")"
    else
      error "❌ Failed to apply patch: $(basename "$entry")"
    fi
  done
  log "All patches applied successfully"
}
# Verify tarball $1 when a CHECKSUMS entry exists for it; otherwise only
# emit a warning (unknown archives are tolerated, not trusted silently).
verify_librdkafka_checksum() {
  local tarball="$1"
  local filename
  filename=$(basename "$tarball")

  if [ -z "${CHECKSUMS[$filename]:-}" ]; then
    warn "No checksum defined for $filename - consider adding one for security"
    return
  fi

  # verify_checksum expects to run from the file's own directory.
  local current_dir
  current_dir=$(pwd)
  cd "$(dirname "$tarball")"
  verify_checksum "$filename"
  cd "$current_dir"
}
# Ensure configure scripts extracted from tarballs are executable.
# Failures are intentionally ignored: the globs may match nothing.
fix_configure_permissions() {
  log "Setting execute permissions on configure scripts..."
  local pattern
  for pattern in 'configure*' 'mklove/modules/configure.*'; do
    # shellcheck disable=SC2086 — unquoted on purpose so the glob expands
    chmod +x $pattern 2>/dev/null || true
  done
}
# Emit the closing supply-chain verification banner.
print_security_summary() {
  local line
  for line in \
    "🔒 SECURITY VERIFICATION COMPLETE" \
    "All dependencies downloaded and verified with SHA256 checksums" \
    "Supply chain integrity maintained throughout build process"; do
    security_log "$line"
  done
}
# Print the human-readable build report.
# $1 platform, $2 arch, $3 output directory, $4 built library filename.
print_build_summary() {
  local platform="$1" arch="$2"
  local output_dir="$3" library_name="$4"

  log "Build completed successfully!"
  log "📦 Self-contained librdkafka built for $platform $arch:"
  log " ✅ OpenSSL $OPENSSL_VERSION (SSL/TLS support) - checksum verified"
  log " ✅ Cyrus SASL $CYRUS_SASL_VERSION (authentication for AWS MSK) - checksum verified"
  log " ✅ MIT Kerberos $KRB5_VERSION (GSSAPI/Kerberos authentication) - checksum verified"
  log " ✅ zlib $ZLIB_VERSION (compression) - checksum verified"
  log " ✅ ZStd $ZSTD_VERSION (high-performance compression) - checksum verified"
  log ""
  log "🎯 Ready for deployment on $platform systems"
  log "☁️ Compatible with AWS MSK and other secured Kafka clusters"
  log "🔐 Supply chain security: All dependencies cryptographically verified"
  log ""
  log "Location: $output_dir/$library_name"
}
# Clean up a build directory. In CI, silently remove everything except
# *.tar.gz files (kept for caching); otherwise prompt the user first.
cleanup_build_dir() {
  local build_dir="$1"

  if [ "${CI:-}" != "true" ]; then
    # Interactive mode: prompt user
    echo
    read -p "Remove build directory $build_dir? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
      rm -rf "$build_dir"
      log "Build directory cleaned up"
    fi
    return
  fi

  # In CI: remove everything except .tar.gz files without prompting
  echo "CI detected: cleaning up $build_dir (preserving .tar.gz files for caching)"

  # Park the tarballs in a temp location first.
  temp_dir=$(mktemp -d)
  find "$build_dir" -name "*.tar.gz" -exec mv {} "$temp_dir/" \; 2>/dev/null || true

  # Wipe the rest. NOTE: the ".*" glob can only match dotfiles here —
  # rm refuses to remove "." and "..", and the errors are suppressed.
  rm -rf "$build_dir"/* 2>/dev/null || true
  rm -rf "$build_dir"/.* 2>/dev/null || true

  # Restore the tarballs and drop the temp dir.
  mv "$temp_dir"/* "$build_dir/" 2>/dev/null || true
  rmdir "$temp_dir" 2>/dev/null || true

  log "Build directory cleaned up (preserved .tar.gz files)"
}
# Verify the generic toolchain (tar/make/patch, a downloader, a SHA256 tool)
# is present before a build starts; aborts via error() listing what's missing.
check_common_dependencies() {
  log "Checking common build dependencies..."

  local missing_tools=()
  local tool

  for tool in tar make patch; do
    command -v "$tool" > /dev/null 2>&1 || missing_tools+=("$tool")
  done

  # Check for download tools
  if ! command -v wget > /dev/null 2>&1 && ! command -v curl > /dev/null 2>&1; then
    missing_tools+=("wget or curl")
  fi

  # Check for checksum tools
  if ! command -v sha256sum > /dev/null 2>&1 && ! command -v shasum > /dev/null 2>&1; then
    missing_tools+=("sha256sum or shasum")
  fi

  if [ ${#missing_tools[@]} -gt 0 ]; then
    error "Missing required tools: ${missing_tools[*]}"
  fi

  log "✅ Common build tools found"
}
# Extract tarball $1 into the current directory unless its target
# directory $2 already exists (idempotent re-runs skip the work).
extract_if_needed() {
  local tarball="$1"
  local expected_dir="$2"

  if [ -d "$expected_dir" ]; then
    log "Directory $expected_dir already exists, skipping extraction"
  else
    log "Extracting $(basename "$tarball")..."
    tar xzf "$tarball"
  fi
}
# Canonical download URLs for each dependency, derived from the pinned
# version constants above.
get_openssl_url() {
  printf '%s\n' "https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz"
}

get_sasl_url() {
  printf '%s\n' "https://github.com/cyrusimap/cyrus-sasl/releases/download/cyrus-sasl-${CYRUS_SASL_VERSION}/cyrus-sasl-${CYRUS_SASL_VERSION}.tar.gz"
}

get_zlib_url() {
  printf '%s\n' "https://github.com/madler/zlib/releases/download/v${ZLIB_VERSION}/zlib-${ZLIB_VERSION}.tar.gz"
}

get_zstd_url() {
  printf '%s\n' "https://github.com/facebook/zstd/releases/download/v${ZSTD_VERSION}/zstd-${ZSTD_VERSION}.tar.gz"
}

get_krb5_url() {
  # Using MIT mirror since kerberos.org is down
  # echo "https://kerberos.org/dist/krb5/${KRB5_VERSION%.*}/krb5-${KRB5_VERSION}.tar.gz"
  printf '%s\n' "https://web.mit.edu/kerberos/dist/krb5/${KRB5_VERSION%.*}/krb5-${KRB5_VERSION}.tar.gz"
}
# Export functions and variables that scripts will need
export -f log warn error security_log
export -f verify_checksum secure_download get_cpu_count
export -f find_librdkafka_tarball find_patches apply_patches
export -f verify_librdkafka_checksum fix_configure_permissions
export -f print_security_summary print_build_summary cleanup_build_dir
export -f check_common_dependencies extract_if_needed
export -f get_openssl_url get_sasl_url get_zlib_url get_zstd_url get_krb5_url

# Export constants
# FIX: LIBRDKAFKA_VERSION was missing from this list even though the exported
# find_librdkafka_tarball helper reads it, so child bash processes invoking
# that function saw an empty version.
# NOTE(review): bash cannot export associative arrays, so CHECKSUMS is only
# visible to subshells of the sourcing script, not to separately exec'd
# bash processes — confirm callers never rely on that.
export OPENSSL_VERSION CYRUS_SASL_VERSION ZLIB_VERSION ZSTD_VERSION KRB5_VERSION LIBRDKAFKA_VERSION
export RED GREEN YELLOW BLUE NC