karafka-rdkafka 0.20.0.rc3-x86_64-linux-gnu
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.github/CODEOWNERS +3 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/workflows/ci_linux_x86_64_gnu.yml +248 -0
- data/.github/workflows/ci_macos_arm64.yml +301 -0
- data/.github/workflows/push_linux_x86_64_gnu.yml +60 -0
- data/.github/workflows/push_ruby.yml +37 -0
- data/.github/workflows/verify-action-pins.yml +16 -0
- data/.gitignore +15 -0
- data/.rspec +2 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/.yardopts +2 -0
- data/CHANGELOG.md +323 -0
- data/Gemfile +5 -0
- data/MIT-LICENSE +22 -0
- data/README.md +177 -0
- data/Rakefile +96 -0
- data/docker-compose.yml +25 -0
- data/ext/README.md +19 -0
- data/ext/Rakefile +131 -0
- data/ext/build_common.sh +361 -0
- data/ext/build_linux_x86_64_gnu.sh +306 -0
- data/ext/build_macos_arm64.sh +550 -0
- data/ext/librdkafka.so +0 -0
- data/karafka-rdkafka.gemspec +61 -0
- data/lib/rdkafka/abstract_handle.rb +116 -0
- data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
- data/lib/rdkafka/admin/config_binding_result.rb +30 -0
- data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
- data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
- data/lib/rdkafka/admin/create_acl_report.rb +24 -0
- data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
- data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/create_topic_report.rb +24 -0
- data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
- data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
- data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
- data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
- data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
- data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
- data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
- data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
- data/lib/rdkafka/admin.rb +832 -0
- data/lib/rdkafka/bindings.rb +582 -0
- data/lib/rdkafka/callbacks.rb +415 -0
- data/lib/rdkafka/config.rb +398 -0
- data/lib/rdkafka/consumer/headers.rb +79 -0
- data/lib/rdkafka/consumer/message.rb +86 -0
- data/lib/rdkafka/consumer/partition.rb +57 -0
- data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
- data/lib/rdkafka/consumer.rb +663 -0
- data/lib/rdkafka/error.rb +201 -0
- data/lib/rdkafka/helpers/oauth.rb +58 -0
- data/lib/rdkafka/helpers/time.rb +14 -0
- data/lib/rdkafka/metadata.rb +115 -0
- data/lib/rdkafka/native_kafka.rb +139 -0
- data/lib/rdkafka/producer/delivery_handle.rb +48 -0
- data/lib/rdkafka/producer/delivery_report.rb +45 -0
- data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
- data/lib/rdkafka/producer.rb +492 -0
- data/lib/rdkafka/version.rb +7 -0
- data/lib/rdkafka.rb +54 -0
- data/renovate.json +92 -0
- data/spec/rdkafka/abstract_handle_spec.rb +117 -0
- data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
- data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
- data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
- data/spec/rdkafka/admin_spec.rb +769 -0
- data/spec/rdkafka/bindings_spec.rb +222 -0
- data/spec/rdkafka/callbacks_spec.rb +20 -0
- data/spec/rdkafka/config_spec.rb +258 -0
- data/spec/rdkafka/consumer/headers_spec.rb +73 -0
- data/spec/rdkafka/consumer/message_spec.rb +139 -0
- data/spec/rdkafka/consumer/partition_spec.rb +57 -0
- data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
- data/spec/rdkafka/consumer_spec.rb +1299 -0
- data/spec/rdkafka/error_spec.rb +95 -0
- data/spec/rdkafka/metadata_spec.rb +79 -0
- data/spec/rdkafka/native_kafka_spec.rb +130 -0
- data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
- data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
- data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
- data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
- data/spec/rdkafka/producer_spec.rb +1234 -0
- data/spec/spec_helper.rb +181 -0
- metadata +244 -0
data/ext/Rakefile
ADDED
@@ -0,0 +1,131 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require File.expand_path('../../lib/rdkafka/version', __FILE__)
|
4
|
+
require "digest"
|
5
|
+
require "fileutils"
|
6
|
+
require "open-uri"
|
7
|
+
|
8
|
+
# Default task: build librdkafka and place the shared library next to this file.
#
# For nix users, nix can't locate the file paths because the packages it's requiring aren't managed by the system but are
# managed by nix itself, so using the normal file paths doesn't work for nix users.
#
# Mini_portile causes an issue because it's dependencies are downloaded on the fly and therefore don't exist/aren't
# accessible in the nix environment
task :default => :clean do
  if ENV.fetch('RDKAFKA_EXT_PATH', '').empty?
    # Download and compile librdkafka if RDKAFKA_EXT_PATH is not set
    require "mini_portile2"
    recipe = MiniPortile.new("librdkafka", Rdkafka::LIBRDKAFKA_VERSION)

    # Use default homebrew openssl if we're on mac and the directory exists, is not using nix-prepared libraries
    # and each of flags is not already set.
    #
    # BUGFIX: the brew probe previously used the string form
    # `system("which brew &> /dev/null")`, which runs under /bin/sh; dash
    # parses "&>" as "run in background, then redirect", so the check did not
    # reflect brew's presence. The argv form below avoids the shell entirely.
    if recipe.host&.include?("darwin") &&
        system("which", "brew", out: File::NULL, err: File::NULL) &&
        !ENV.key?("NIX_LDFLAGS")
      # Resolve the prefix once instead of assigning inside a string interpolation.
      homebrew_prefix = %x(brew --prefix openssl).strip
      if Dir.exist?(homebrew_prefix)
        ENV["CPPFLAGS"] = "-I#{homebrew_prefix}/include" unless ENV.key?("CPPFLAGS")
        ENV["LDFLAGS"] = "-L#{homebrew_prefix}/lib" unless ENV.key?("LDFLAGS")
      end
    end

    releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))

    # Build from the vendored tarball, pinned by SHA256.
    recipe.files << {
      :url => "file://#{releases}/librdkafka-#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz",
      :sha256 => Rdkafka::LIBRDKAFKA_SOURCE_SHA256
    }
    recipe.configure_options = ["--host=#{recipe.host}"]

    recipe.patch_files = Dir[File.join(releases, 'patches', "*.patch")].sort

    # Disable using libc regex engine in favor of the embedded one
    # The default regex engine of librdkafka does not always work exactly as most of the users
    # would expect, hence this flag allows for changing it to the other one
    if ENV.key?('RDKAFKA_DISABLE_REGEX_EXT')
      recipe.configure_options << '--disable-regex-ext'
    end

    recipe.cook

    # Move dynamic library we're interested in
    if recipe.host.include?('darwin')
      from_extension = '1.dylib'
      to_extension = 'dylib'
    else
      from_extension = 'so.1'
      to_extension = 'so'
    end
    lib_path = File.join(File.dirname(__FILE__), "ports/#{recipe.host}/librdkafka/#{Rdkafka::LIBRDKAFKA_VERSION}/lib/librdkafka.#{from_extension}")
    FileUtils.mv(lib_path, File.join(File.dirname(__FILE__), "librdkafka.#{to_extension}"))

    # Cleanup files created by miniportile we don't need in the gem
    FileUtils.rm_rf File.join(File.dirname(__FILE__), "tmp")
    FileUtils.rm_rf File.join(File.dirname(__FILE__), "ports")
  else
    # Otherwise, copy existing libraries to ./ext
    # NOTE(review): this check is unreachable given the outer `empty?` guard;
    # kept as a defensive message for nix users.
    if ENV['RDKAFKA_EXT_PATH'].nil? || ENV['RDKAFKA_EXT_PATH'].empty?
      raise "RDKAFKA_EXT_PATH must be set in your nix config when running under nix"
    end

    # Copy whichever prebuilt library exists (macOS and/or Linux).
    files = [
      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.dylib'),
      File.join(ENV['RDKAFKA_EXT_PATH'], 'lib', 'librdkafka.so')
    ]
    files.each { |ext| FileUtils.cp(ext, File.dirname(__FILE__)) if File.exist?(ext) }
  end
end
|
69
|
+
|
70
|
+
# Remove previously built libraries and any miniportile work directories so
# the next build starts from a clean slate.
task :clean do
  here = File.dirname(__FILE__)

  %w[librdkafka.dylib librdkafka.so].each do |lib|
    FileUtils.rm_f File.join(here, lib)
  end

  %w[ports tmp].each do |dir|
    FileUtils.rm_rf File.join(here, dir)
  end
end
|
76
|
+
|
77
|
+
# Tasks that manage the vendored librdkafka source tarball under ../dist.
namespace :dist do
  # Resolve the dist directory (overridable via RDKAFKA_DIST_PATH).
  task :dir do
    ENV["RDKAFKA_DIST_PATH"] ||= File.expand_path(File.join(File.dirname(__FILE__), '..', 'dist'))
  end

  # Resolve the expected tarball path (overridable via RDKAFKA_DIST_FILE).
  # NOTE(review): this name uses an underscore (librdkafka_x.y.z) while the
  # :default build task expects a hyphen (librdkafka-x.y.z) — confirm which
  # naming downstream tooling relies on before unifying.
  task :file => "dist:dir" do
    ENV["RDKAFKA_DIST_FILE"] ||= File.join(ENV["RDKAFKA_DIST_PATH"], "librdkafka_#{Rdkafka::LIBRDKAFKA_VERSION}.tar.gz")
  end

  # Remove everything from the dist directory except the current tarball.
  task :clean => "dist:file" do
    Dir.glob(File.join(ENV['RDKAFKA_DIST_PATH'], "*")).each do |filename|
      next if filename.include? ENV["RDKAFKA_DIST_FILE"]

      FileUtils.rm_rf filename
    end
  end

  # Download the pinned librdkafka release and verify its SHA256 before
  # writing it into the dist directory.
  task :download => "dist:file" do
    version = Rdkafka::LIBRDKAFKA_VERSION
    librdkafka_download = "https://codeload.github.com/confluentinc/librdkafka/tar.gz/v#{version}"

    # BUGFIX: the payload is a gzipped tarball, so it must be read and written
    # in binary mode. Text-mode IO (plain File.write / default URI.open) can
    # apply newline translation or encoding conversion and corrupt the archive.
    URI.open(librdkafka_download, "rb") do |file|
      filename = ENV["RDKAFKA_DIST_FILE"]
      data = file.read

      if Digest::SHA256.hexdigest(data) != Rdkafka::LIBRDKAFKA_SOURCE_SHA256
        raise "SHA256 does not match downloaded file"
      end

      File.binwrite(filename, data)
    end
  end

  # Refresh the dist dir: fetch the pinned tarball, then drop stale files.
  task :update => %w[dist:download dist:clean]
end
|
112
|
+
|
113
|
+
# Ad-hoc build of librdkafka straight from a git ref (sha or tag), e.g.
#   rake "build:git[v2.8.0]"
namespace :build do
  desc "Build librdkafka at the given git sha or tag"
  task :git, [:ref] do |task, args|
    # BUGFIX: mini_portile2 was only required inside the :default task, so
    # running build:git on its own raised NameError for MiniPortile.
    require "mini_portile2"

    ref = args[:ref]
    version = "git-#{ref}"

    recipe = MiniPortile.new("librdkafka", version)
    recipe.files << "https://github.com/confluentinc/librdkafka/archive/#{ref}.tar.gz"
    recipe.configure_options = ["--host=#{recipe.host}", "--enable-static", "--enable-zstd"]

    # BUGFIX: `releases` was referenced here but never defined in this task
    # (it is a local variable of the :default task), raising NameError before
    # any patches could be applied.
    releases = File.expand_path(File.join(File.dirname(__FILE__), '../dist'))
    recipe.patch_files = Dir[File.join(releases, 'patches', "*.patch")].sort
    recipe.cook

    ext = recipe.host.include?("darwin") ? "dylib" : "so"
    lib = File.expand_path("ports/#{recipe.host}/librdkafka/#{version}/lib/librdkafka.#{ext}", __dir__)

    # Copy will copy the content, following any symlinks
    FileUtils.cp(lib, __dir__)
  end
end
|
data/ext/build_common.sh
ADDED
@@ -0,0 +1,361 @@
|
|
1
|
+
#!/usr/bin/env bash
#
# Common functions and constants for librdkafka builds
# This file should be sourced by platform-specific build scripts
#
# Usage: source "$(dirname "${BASH_SOURCE[0]}")/build_common.sh"
#

# Prevent multiple sourcing
if [[ "${BUILD_COMMON_SOURCED:-}" == "1" ]]; then
  return 0
fi

BUILD_COMMON_SOURCED=1

# Version constants - update these to upgrade dependencies
readonly OPENSSL_VERSION="3.0.16"
readonly CYRUS_SASL_VERSION="2.1.28"
readonly ZLIB_VERSION="1.3.1"
readonly ZSTD_VERSION="1.5.7"
readonly KRB5_VERSION="1.21.3"
readonly LIBRDKAFKA_VERSION="2.8.0"

# SHA256 checksums for supply chain security
# Update these when upgrading versions
declare -A CHECKSUMS=(
  ["openssl-${OPENSSL_VERSION}.tar.gz"]="57e03c50feab5d31b152af2b764f10379aecd8ee92f16c985983ce4a99f7ef86"
  ["cyrus-sasl-${CYRUS_SASL_VERSION}.tar.gz"]="7ccfc6abd01ed67c1a0924b353e526f1b766b21f42d4562ee635a8ebfc5bb38c"
  # BUGFIX(consistency): this key was hard-coded as "zlib-1.3.1.tar.gz";
  # use ${ZLIB_VERSION} like every sibling entry so a version bump cannot
  # silently desynchronize the table key from the downloaded filename.
  ["zlib-${ZLIB_VERSION}.tar.gz"]="9a93b2b7dfdac77ceba5a558a580e74667dd6fede4585b91eefb60f03b72df23"
  ["zstd-${ZSTD_VERSION}.tar.gz"]="eb33e51f49a15e023950cd7825ca74a4a2b43db8354825ac24fc1b7ee09e6fa3"
  ["krb5-${KRB5_VERSION}.tar.gz"]="b7a4cd5ead67fb08b980b21abd150ff7217e85ea320c9ed0c6dadd304840ad35"
  ["librdkafka-${LIBRDKAFKA_VERSION}.tar.gz"]="5bd1c46f63265f31c6bfcedcde78703f77d28238eadf23821c2b43fc30be3e25"
)

# Colors for output
readonly RED='\033[0;31m'
readonly GREEN='\033[0;32m'
readonly YELLOW='\033[1;33m'
readonly BLUE='\033[0;34m'
readonly NC='\033[0m' # No Color
|
41
|
+
|
42
|
+
# Logging functions
|
43
|
+
# Logging helpers. Informational output (log/security_log) goes to stdout;
# diagnostics (warn/error) go to stderr so callers can capture clean stdout.
log() {
  echo -e "${GREEN}[$(date '+%Y-%m-%d %H:%M:%S')] $1${NC}"
}

# Print a warning; does not abort.
# BUGFIX: diagnostics now go to stderr instead of polluting stdout.
warn() {
  echo -e "${YELLOW}[WARNING] $1${NC}" >&2
}

# Print an error to stderr and abort the whole script.
error() {
  echo -e "${RED}[ERROR] $1${NC}" >&2
  exit 1
}

# Progress messages for security-relevant steps (checksum verification etc.).
security_log() {
  echo -e "${BLUE}[SECURITY] $1${NC}"
}
|
59
|
+
|
60
|
+
# Function to verify checksums
|
61
|
+
# Verify the SHA256 digest of "$1" (a filename relative to the CWD) against
# the CHECKSUMS table. Aborts via error() when no checksum is registered,
# no hashing utility exists, or the digest does not match.
verify_checksum() {
  local file="$1"
  local expected_checksum="${CHECKSUMS[$file]}"

  [ -n "$expected_checksum" ] || error "No checksum defined for $file - this is a security risk!"

  security_log "Verifying checksum for $file..."

  # Prefer GNU sha256sum; fall back to BSD/macOS shasum.
  local actual_checksum
  if command -v sha256sum > /dev/null 2>&1; then
    actual_checksum=$(sha256sum "$file" | cut -d' ' -f1)
  elif command -v shasum > /dev/null 2>&1; then
    actual_checksum=$(shasum -a 256 "$file" | cut -d' ' -f1)
  else
    error "No SHA256 checksum utility found (tried sha256sum, shasum)"
  fi

  if [ "$actual_checksum" != "$expected_checksum" ]; then
    error "❌ CHECKSUM MISMATCH for $file!
Expected: $expected_checksum
Actual: $actual_checksum
This could indicate a supply chain attack or corrupted download!"
  fi

  security_log "✅ Checksum verified for $file"
  return 0
}
|
91
|
+
|
92
|
+
# Function to securely download and verify files
|
93
|
+
# Download "$1" to "$2" unless the destination already exists; in both cases
# the file's SHA256 checksum is verified (via verify_checksum) before
# returning. Uses wget when available, otherwise curl, always forcing
# TLS >= 1.2.
secure_download() {
  local url="$1"
  local dest="$2"

  # Reuse a previously downloaded file when its checksum still matches.
  if [ -f "$dest" ]; then
    log "File $dest already exists, verifying checksum..."
    verify_checksum "$dest"
    return 0
  fi

  log "Downloading $dest from $url..."

  if command -v wget > /dev/null 2>&1; then
    # Linux - use wget with security options
    wget --secure-protocol=TLSv1_2 \
      --https-only \
      --timeout=30 \
      --tries=3 \
      --progress=bar \
      "$url" \
      -O "$dest" || error "Failed to download $dest from $url"
  elif command -v curl > /dev/null 2>&1; then
    # macOS/fallback - use curl with security options
    curl -L \
      --tlsv1.2 \
      --connect-timeout 30 \
      --max-time 300 \
      --retry 3 \
      --progress-bar \
      "$url" \
      -o "$dest" || error "Failed to download $dest from $url"
  else
    error "No download utility found (tried wget, curl)"
  fi

  # Verify checksum immediately after download
  verify_checksum "$dest"
}
|
136
|
+
|
137
|
+
# Function to detect CPU count for parallel builds
|
138
|
+
# Print the number of CPUs for parallel builds; tries nproc (Linux) then
# sysctl (macOS/BSD), defaulting to 4.
get_cpu_count() {
  local n
  if command -v nproc > /dev/null 2>&1; then
    n=$(nproc)
  elif command -v sysctl > /dev/null 2>&1; then
    n=$(sysctl -n hw.ncpu)
  else
    n=4 # fallback
  fi
  echo "$n"
}
|
147
|
+
|
148
|
+
# Function to auto-detect librdkafka tarball
|
149
|
+
# Print the absolute path of the pinned librdkafka tarball inside "$1",
# aborting via error() when it is missing.
find_librdkafka_tarball() {
  local dist_dir="$1"
  local tarball="$dist_dir/librdkafka-${LIBRDKAFKA_VERSION}.tar.gz"

  [ -f "$tarball" ] || error "librdkafka-${LIBRDKAFKA_VERSION}.tar.gz not found in $dist_dir"

  echo "$tarball"
}
|
159
|
+
|
160
|
+
# Function to find and validate patches
|
161
|
+
# Collect all *.patch files under "$1" (sorted) into the array whose NAME is
# passed as "$2" (bash 4.3+ nameref). Logs what was found.
find_patches() {
  local patches_dir="$1"
  local -n found=$2 # nameref to output array

  found=()

  if [ ! -d "$patches_dir" ]; then
    log "No patches directory found: $patches_dir"
    return 0
  fi

  # NUL-delimited find + sort keeps arbitrary filenames intact.
  local p
  while IFS= read -r -d '' p; do
    found+=("$p")
  done < <(find "$patches_dir" -name "*.patch" -type f -print0 | sort -z)

  if [ ${#found[@]} -eq 0 ]; then
    log "No patches found in $patches_dir"
    return 0
  fi

  log "Found ${#found[@]} patches to apply:"
  for p in "${found[@]}"; do
    log " - $(basename "$p")"
  done
}
|
184
|
+
|
185
|
+
# Function to apply patches
|
186
|
+
# Apply each patch in the array whose NAME is passed as "$1" (nameref) using
# patch -p1 from the current directory. Aborts on the first failing patch.
apply_patches() {
  local -n plist=$1 # nameref to patches array

  # Nothing to do when no patches were found.
  [ ${#plist[@]} -gt 0 ] || return 0

  log "Applying Ruby-specific patches..."
  local p
  for p in "${plist[@]}"; do
    log "Applying patch: $(basename "$p")"
    if ! patch -p1 < "$p"; then
      error "❌ Failed to apply patch: $(basename "$p")"
    fi
    log "✅ Successfully applied $(basename "$p")"
  done
  log "All patches applied successfully"
}
|
202
|
+
|
203
|
+
# Function to verify librdkafka tarball checksum if available
|
204
|
+
# Verify the librdkafka tarball's checksum when an entry for it exists in
# CHECKSUMS; otherwise only warn (unpinned tarballs are tolerated).
verify_librdkafka_checksum() {
  local tarball="$1"
  local filename
  filename=$(basename "$tarball")

  if [ -z "${CHECKSUMS[$filename]:-}" ]; then
    warn "No checksum defined for $filename - consider adding one for security"
    return 0
  fi

  # verify_checksum expects to run from the file's directory; restore the
  # caller's working directory afterwards. Deliberately no subshell here:
  # error() must be able to abort the whole script, not just a child shell.
  local saved_dir
  saved_dir=$(pwd)
  cd "$(dirname "$tarball")"
  verify_checksum "$filename"
  cd "$saved_dir"
}
|
219
|
+
|
220
|
+
# Function to set execute permissions on configure scripts
|
221
|
+
# Ensure configure scripts extracted from a source tarball are executable
# (tar does not always preserve the execute bit). Must be run from the
# extracted source root. Failures are ignored on purpose: either glob may
# match nothing.
fix_configure_permissions() {
  log "Setting execute permissions on configure scripts..."
  chmod +x configure* 2>/dev/null || true
  chmod +x mklove/modules/configure.* 2>/dev/null || true
}
|
226
|
+
|
227
|
+
# Function to print security summary
|
228
|
+
# Print a short banner confirming that all downloads were checksum-verified.
# Output goes wherever security_log sends it.
print_security_summary() {
  security_log "🔒 SECURITY VERIFICATION COMPLETE"
  security_log "All dependencies downloaded and verified with SHA256 checksums"
  security_log "Supply chain integrity maintained throughout build process"
}

# Print a human-readable build summary.
# Arguments:
#   $1 - platform name (e.g. "Linux")
#   $2 - architecture (e.g. "x86_64")
#   $3 - output directory containing the built library
#   $4 - library filename (e.g. "librdkafka.so")
# Note: version numbers come from the constants defined at the top of this file.
print_build_summary() {
  local platform="$1"
  local arch="$2"
  local output_dir="$3"
  local library_name="$4"

  log "Build completed successfully!"
  log "📦 Self-contained librdkafka built for $platform $arch:"
  log "   ✅ OpenSSL $OPENSSL_VERSION (SSL/TLS support) - checksum verified"
  log "   ✅ Cyrus SASL $CYRUS_SASL_VERSION (authentication for AWS MSK) - checksum verified"
  log "   ✅ MIT Kerberos $KRB5_VERSION (GSSAPI/Kerberos authentication) - checksum verified"
  log "   ✅ zlib $ZLIB_VERSION (compression) - checksum verified"
  log "   ✅ ZStd $ZSTD_VERSION (high-performance compression) - checksum verified"
  log ""
  log "🎯 Ready for deployment on $platform systems"
  log "☁️ Compatible with AWS MSK and other secured Kafka clusters"
  log "🔐 Supply chain security: All dependencies cryptographically verified"
  log ""
  log "Location: $output_dir/$library_name"
}
|
255
|
+
|
256
|
+
# Function to clean up build directory with user prompt (except .tar.gz files in CI)
|
257
|
+
# Clean up a build directory.
# In CI (CI=true): remove everything except *.tar.gz files without prompting,
# preserving the tarballs for caching. Otherwise: ask the user interactively
# before deleting the whole directory.
cleanup_build_dir() {
  local build_dir="$1"

  if [ "${CI:-}" = "true" ]; then
    echo "CI detected: cleaning up $build_dir (preserving .tar.gz files for caching)"

    # Park the tarballs in a temp dir, wipe the build dir, then move them back.
    # BUGFIX: temp_dir was previously an undeclared global leaking into the
    # caller's scope; it is now local to this function.
    local temp_dir
    temp_dir=$(mktemp -d)
    find "$build_dir" -name "*.tar.gz" -exec mv {} "$temp_dir/" \; 2>/dev/null || true

    # ".*" also expands to "." and ".." — rm refuses those; stderr suppressed.
    rm -rf "$build_dir"/* 2>/dev/null || true
    rm -rf "$build_dir"/.* 2>/dev/null || true

    # Move .tar.gz files back
    mv "$temp_dir"/* "$build_dir/" 2>/dev/null || true
    rmdir "$temp_dir" 2>/dev/null || true

    log "Build directory cleaned up (preserved .tar.gz files)"
  else
    # Interactive mode: prompt user
    echo
    read -p "Remove build directory $build_dir? (y/N): " -n 1 -r
    echo
    if [[ $REPLY =~ ^[Yy]$ ]]; then
      rm -rf "$build_dir"
      log "Build directory cleaned up"
    fi
  fi
}
|
288
|
+
|
289
|
+
# Function to validate build environment
|
290
|
+
# Verify that the generic build tools (tar/make/patch), at least one download
# tool (wget/curl) and at least one SHA256 tool (sha256sum/shasum) are
# installed. Aborts with the complete list of missing tools.
check_common_dependencies() {
  log "Checking common build dependencies..."

  local missing_tools=()
  local t
  for t in tar make patch; do
    command -v "$t" > /dev/null 2>&1 || missing_tools+=("$t")
  done

  # Check for download tools
  if ! command -v wget > /dev/null 2>&1 && ! command -v curl > /dev/null 2>&1; then
    missing_tools+=("wget or curl")
  fi

  # Check for checksum tools
  if ! command -v sha256sum > /dev/null 2>&1 && ! command -v shasum > /dev/null 2>&1; then
    missing_tools+=("sha256sum or shasum")
  fi

  if [ ${#missing_tools[@]} -gt 0 ]; then
    error "Missing required tools: ${missing_tools[*]}"
  fi

  log "✅ Common build tools found"
}
|
315
|
+
|
316
|
+
# Function to extract tarball if directory doesn't exist
|
317
|
+
# Extract tarball "$1" into the current directory unless directory "$2"
# (the expected extraction result) already exists.
extract_if_needed() {
  local archive="$1"
  local target_dir="$2"

  if [ -d "$target_dir" ]; then
    log "Directory $target_dir already exists, skipping extraction"
    return 0
  fi

  log "Extracting $(basename "$archive")..."
  tar xzf "$archive"
}
|
328
|
+
|
329
|
+
# Download URLs for dependencies
|
330
|
+
# Canonical download URLs for each dependency, derived from the version
# constants declared at the top of this file. Each function prints one URL.
get_openssl_url() {
  printf '%s\n' "https://www.openssl.org/source/openssl-${OPENSSL_VERSION}.tar.gz"
}

get_sasl_url() {
  printf '%s\n' "https://github.com/cyrusimap/cyrus-sasl/releases/download/cyrus-sasl-${CYRUS_SASL_VERSION}/cyrus-sasl-${CYRUS_SASL_VERSION}.tar.gz"
}

get_zlib_url() {
  printf '%s\n' "https://github.com/madler/zlib/releases/download/v${ZLIB_VERSION}/zlib-${ZLIB_VERSION}.tar.gz"
}

get_zstd_url() {
  printf '%s\n' "https://github.com/facebook/zstd/releases/download/v${ZSTD_VERSION}/zstd-${ZSTD_VERSION}.tar.gz"
}

# krb5 dist paths use the major.minor series directory (e.g. 1.21 for 1.21.3),
# hence the ${KRB5_VERSION%.*} suffix strip.
get_krb5_url() {
  printf '%s\n' "https://kerberos.org/dist/krb5/${KRB5_VERSION%.*}/krb5-${KRB5_VERSION}.tar.gz"
}
|
349
|
+
|
350
|
+
# Export functions and variables that scripts will need
|
351
|
+
# Export functions and variables that platform build scripts will need.
# NOTE(review): bash cannot export associative arrays, so CHECKSUMS is only
# available to scripts that *source* this file; exported functions that read
# it (e.g. verify_checksum) will not see it in a separate child process —
# confirm all consumers source rather than exec.
export -f log warn error security_log
export -f verify_checksum secure_download get_cpu_count
export -f find_librdkafka_tarball find_patches apply_patches
export -f verify_librdkafka_checksum fix_configure_permissions
export -f print_security_summary print_build_summary cleanup_build_dir
export -f check_common_dependencies extract_if_needed
export -f get_openssl_url get_sasl_url get_zlib_url get_zstd_url get_krb5_url

# Export constants
export OPENSSL_VERSION CYRUS_SASL_VERSION ZLIB_VERSION ZSTD_VERSION KRB5_VERSION
export RED GREEN YELLOW BLUE NC
|