karafka-rdkafka 0.21.0.rc1-x86_64-linux-musl → 0.21.0.rc2-x86_64-linux-musl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (68) hide show
  1. checksums.yaml +4 -4
  2. data/.github/workflows/{ci_linux_x86_64_musl.yml → ci_linux_alpine_x86_64_musl.yml} +12 -9
  3. data/.github/workflows/ci_linux_alpine_x86_64_musl_complementary.yml +264 -0
  4. data/.github/workflows/ci_linux_debian_x86_64_gnu.yml +271 -0
  5. data/.github/workflows/ci_linux_debian_x86_64_gnu_complementary.yml +334 -0
  6. data/.github/workflows/{ci_linux_x86_64_gnu.yml → ci_linux_ubuntu_aarch64_gnu.yml} +15 -15
  7. data/.github/workflows/ci_linux_ubuntu_aarch64_gnu_complementary.yml +295 -0
  8. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +281 -0
  9. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu_complementary.yml +294 -0
  10. data/.github/workflows/ci_macos_arm64.yml +5 -5
  11. data/.github/workflows/push_linux_aarch64_gnu.yml +65 -0
  12. data/.github/workflows/push_linux_x86_64_gnu.yml +2 -2
  13. data/.github/workflows/push_linux_x86_64_musl.yml +3 -3
  14. data/.github/workflows/push_macos_arm64.yml +2 -2
  15. data/.github/workflows/push_ruby.yml +1 -1
  16. data/.github/workflows/trigger-wiki-refresh.yml +30 -0
  17. data/.github/workflows/verify-action-pins.yml +1 -1
  18. data/.gitignore +1 -0
  19. data/.rspec +1 -0
  20. data/CHANGELOG.md +7 -1
  21. data/README.md +34 -134
  22. data/dist/cyrus-sasl-2.1.28.tar.gz +0 -0
  23. data/dist/krb5-1.21.3.tar.gz +0 -0
  24. data/dist/openssl-3.0.16.tar.gz +0 -0
  25. data/dist/zlib-1.3.1.tar.gz +0 -0
  26. data/dist/zstd-1.5.7.tar.gz +0 -0
  27. data/docker-compose-ssl.yml +35 -0
  28. data/ext/build_common.sh +16 -1
  29. data/ext/build_linux_aarch64_gnu.sh +326 -0
  30. data/ext/build_linux_x86_64_gnu.sh +12 -1
  31. data/ext/build_linux_x86_64_musl.sh +18 -8
  32. data/ext/build_macos_arm64.sh +7 -0
  33. data/ext/generate-ssl-certs.sh +109 -0
  34. data/ext/librdkafka.so +0 -0
  35. data/karafka-rdkafka.gemspec +2 -0
  36. data/lib/rdkafka/bindings.rb +0 -1
  37. data/lib/rdkafka/consumer.rb +1 -1
  38. data/lib/rdkafka/version.rb +1 -1
  39. data/spec/integrations/ssl_stress_spec.rb +121 -0
  40. data/spec/{rdkafka → lib/rdkafka}/admin_spec.rb +16 -4
  41. data/spec/{rdkafka → lib/rdkafka}/consumer_spec.rb +50 -3
  42. data/spec/{rdkafka → lib/rdkafka}/metadata_spec.rb +2 -2
  43. data/spec/{rdkafka → lib/rdkafka}/producer/delivery_report_spec.rb +1 -1
  44. data/spec/{rdkafka → lib/rdkafka}/producer_spec.rb +6 -5
  45. data/spec/spec_helper.rb +45 -8
  46. metadata +76 -31
  47. /data/spec/{rdkafka → lib/rdkafka}/abstract_handle_spec.rb +0 -0
  48. /data/spec/{rdkafka → lib/rdkafka}/admin/create_acl_handle_spec.rb +0 -0
  49. /data/spec/{rdkafka → lib/rdkafka}/admin/create_acl_report_spec.rb +0 -0
  50. /data/spec/{rdkafka → lib/rdkafka}/admin/create_topic_handle_spec.rb +0 -0
  51. /data/spec/{rdkafka → lib/rdkafka}/admin/create_topic_report_spec.rb +0 -0
  52. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_acl_handle_spec.rb +0 -0
  53. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_acl_report_spec.rb +0 -0
  54. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_topic_handle_spec.rb +0 -0
  55. /data/spec/{rdkafka → lib/rdkafka}/admin/delete_topic_report_spec.rb +0 -0
  56. /data/spec/{rdkafka → lib/rdkafka}/admin/describe_acl_handle_spec.rb +0 -0
  57. /data/spec/{rdkafka → lib/rdkafka}/admin/describe_acl_report_spec.rb +0 -0
  58. /data/spec/{rdkafka → lib/rdkafka}/bindings_spec.rb +0 -0
  59. /data/spec/{rdkafka → lib/rdkafka}/callbacks_spec.rb +0 -0
  60. /data/spec/{rdkafka → lib/rdkafka}/config_spec.rb +0 -0
  61. /data/spec/{rdkafka → lib/rdkafka}/consumer/headers_spec.rb +0 -0
  62. /data/spec/{rdkafka → lib/rdkafka}/consumer/message_spec.rb +0 -0
  63. /data/spec/{rdkafka → lib/rdkafka}/consumer/partition_spec.rb +0 -0
  64. /data/spec/{rdkafka → lib/rdkafka}/consumer/topic_partition_list_spec.rb +0 -0
  65. /data/spec/{rdkafka → lib/rdkafka}/error_spec.rb +0 -0
  66. /data/spec/{rdkafka → lib/rdkafka}/native_kafka_spec.rb +0 -0
  67. /data/spec/{rdkafka → lib/rdkafka}/producer/delivery_handle_spec.rb +0 -0
  68. /data/spec/{rdkafka → lib/rdkafka}/producer/partitions_count_cache_spec.rb +0 -0
data/README.md CHANGED
@@ -1,163 +1,63 @@
1
- # Rdkafka
1
+ # Karafka-Rdkafka
2
2
 
3
- [![Build Status](https://github.com/karafka/karafka-rdkafka/actions/workflows/ci_linux_x86_64_gnu.yml/badge.svg)](https://github.com/karafka/karafka-rdkafka/actions/workflows/ci_linux_x86_64_gnu.yml)
3
+ [![Build Status](https://github.com/karafka/karafka-rdkafka/actions/workflows/ci_linux_ubuntu_x86_64_gnu.yml/badge.svg)](https://github.com/karafka/karafka-rdkafka/actions/workflows/ci_linux_ubuntu_x86_64_gnu.yml)
4
4
  [![Gem Version](https://badge.fury.io/rb/karafka-rdkafka.svg)](https://badge.fury.io/rb/karafka-rdkafka)
5
5
  [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
6
6
 
7
7
  > [!NOTE]
8
- > The `rdkafka-ruby` gem was created and developed by [AppSignal](https://www.appsignal.com/). Their impactful contributions have significantly shaped the Ruby Kafka and Karafka ecosystems. For robust monitoring, we highly recommend AppSignal.
8
+ > **Karafka-Rdkafka** is a fork of the [rdkafka-ruby](https://github.com/karafka/rdkafka-ruby) gem, specifically maintained for the [Karafka ecosystem](https://karafka.io). This fork exists to provide Karafka-specific patches and optimizations that are not generic enough for inclusion in the upstream rdkafka-ruby library.
9
9
 
10
10
  ---
11
11
 
12
- The `rdkafka` gem is a modern Kafka client library for Ruby based on
13
- [librdkafka](https://github.com/confluentinc/librdkafka/).
14
- It wraps the production-ready C client using the [ffi](https://github.com/ffi/ffi)
15
- gem and targets Kafka 1.0+ and Ruby versions under security or
16
- active maintenance. We remove a Ruby version from our CI builds when they
17
- become EOL.
12
+ ## Why This Fork Exists
18
13
 
19
- `rdkafka` was written because of the need for a reliable Ruby client for Kafka that supports modern Kafka at [AppSignal](https://appsignal.com). AppSignal runs it in production on very high-traffic systems.
14
+ While rdkafka-ruby serves as an excellent general-purpose Kafka client for Ruby, Karafka requires specific optimizations and patches that are:
20
15
 
21
- The most essential pieces of a Kafka client are implemented, and we aim to provide all relevant consumer, producer, and admin APIs.
16
+ - **Karafka-specific**: Tailored for Karafka's unique processing patterns and requirements
17
+ - **Performance-oriented**: Focused on high-throughput, low-latency scenarios that Karafka specializes in
18
+ - **Framework-integrated**: Designed to work seamlessly with Karafka's architecture and features
22
19
 
23
- ## Table of contents
20
+ These modifications are not suitable for the upstream rdkafka-ruby library because they are either too specific to Karafka's use cases or might introduce breaking changes for other users of rdkafka-ruby.
24
21
 
25
- - [Project Scope](#project-scope)
26
- - [Installation](#installation)
27
- - [Usage](#usage)
28
- * [Consuming Messages](#consuming-messages)
29
- * [Producing Messages](#producing-messages)
30
- - [Higher Level Libraries](#higher-level-libraries)
31
- * [Message Processing Frameworks](#message-processing-frameworks)
32
- * [Message Publishing Libraries](#message-publishing-libraries)
33
- - [Forking](#forking)
34
- - [Development](#development)
35
- - [Example](#example)
36
- - [Versions](#versions)
22
+ ## Maintenance and Synchronization
37
23
 
38
- ## Project Scope
24
+ This fork is actively maintained and kept in sync with the upstream rdkafka-ruby repository as much as possible. We:
39
25
 
40
- While rdkafka-ruby aims to simplify the use of librdkafka in Ruby applications, it's important to understand the limitations of this library:
26
+ - **Regularly merge** upstream changes from rdkafka-ruby
27
+ - **Maintain compatibility** with the rdkafka-ruby API wherever possible
28
+ - **Apply minimal patches** to avoid diverging significantly from upstream
29
+ - **Merge back generic improvements** from karafka-rdkafka to rdkafka-ruby when they benefit the broader community
30
+ - **Test thoroughly** to ensure stability within the Karafka ecosystem
41
31
 
42
- - **No Complex Producers/Consumers**: This library does not intend to offer complex producers or consumers. The aim is to stick closely to the functionalities provided by librdkafka itself.
32
+ ## Long-term Plan
43
33
 
44
- - **Focus on librdkafka Capabilities**: Features that can be achieved directly in Ruby, without specific needs from librdkafka, are outside the scope of this library.
34
+ Our long-term goal is to work with the rdkafka-ruby maintainers to eventually merge the beneficial changes back upstream. This would allow us to:
45
35
 
46
- - **Existing High-Level Functionalities**: Certain high-level functionalities like producer metadata cache and simple consumer are already part of the library. Although they fall slightly outside the primary goal, they will remain part of the contract, given their existing usage.
36
+ - Reduce maintenance overhead
37
+ - Benefit the broader Ruby Kafka community
38
+ - Simplify the Karafka ecosystem dependencies
47
39
 
40
+ However, until such integration is possible, this fork ensures that Karafka users get the best possible performance and reliability.
48
41
 
49
- ## Installation
42
+ ### If You're Using Standalone rdkafka
50
43
 
51
- When installed, this gem downloads and compiles librdkafka. If you have any problems installing the gem, please open an issue.
44
+ You should use the [original rdkafka-ruby gem](https://github.com/karafka/rdkafka-ruby) for general Kafka client needs. This fork is specifically designed for Karafka and may not be suitable for other use cases.
52
45
 
53
- ## Usage
46
+ ### Reporting Issues
54
47
 
55
- Please see the [documentation](https://karafka.io/docs/code/rdkafka-ruby/) for full details on how to use this gem. Below are two quick examples.
48
+ For issues related to this fork, please report them in the [rdkafka-ruby repository](https://github.com/karafka/rdkafka-ruby/issues) rather than here. This helps us:
56
49
 
57
- Unless you are seeking specific low-level capabilities, we **strongly** recommend using [Karafka](https://github.com/karafka/karafka) and [WaterDrop](https://github.com/karafka/waterdrop) when working with Kafka. These are higher-level libraries also maintained by us based on rdkafka-ruby.
50
+ - Maintain a single place for issue tracking
51
+ - Ensure upstream compatibility
52
+ - Provide better support for all users
58
53
 
59
- ### Consuming Messages
54
+ ### Contributing
60
55
 
61
- Subscribe to a topic and get messages. Kafka will automatically spread
62
- the available partitions over consumers with the same group id.
56
+ Contributions should generally be made to the upstream [rdkafka-ruby repository](https://github.com/karafka/rdkafka-ruby). Changes to this fork are only made when:
63
57
 
64
- ```ruby
65
- config = {
66
- :"bootstrap.servers" => "localhost:9092",
67
- :"group.id" => "ruby-test"
68
- }
69
- consumer = Rdkafka::Config.new(config).consumer
70
- consumer.subscribe("ruby-test-topic")
71
-
72
- consumer.each do |message|
73
- puts "Message received: #{message}"
74
- end
75
- ```
76
-
77
- ### Producing Messages
78
-
79
- Produce several messages, put the delivery handles in an array, and
80
- wait for them before exiting. This way the messages will be batched and
81
- efficiently sent to Kafka.
82
-
83
- ```ruby
84
- config = {:"bootstrap.servers" => "localhost:9092"}
85
- producer = Rdkafka::Config.new(config).producer
86
- delivery_handles = []
87
-
88
- 100.times do |i|
89
- puts "Producing message #{i}"
90
- delivery_handles << producer.produce(
91
- topic: "ruby-test-topic",
92
- payload: "Payload #{i}",
93
- key: "Key #{i}"
94
- )
95
- end
96
-
97
- delivery_handles.each(&:wait)
98
- ```
99
-
100
- Note that creating a producer consumes some resources that will not be released until `#close` is explicitly called, so be sure to call `Config#producer` only as necessary.
101
-
102
- ## Higher Level Libraries
103
-
104
- Currently, there are two actively developed frameworks based on `rdkafka-ruby`, that provide higher-level API that can be used to work with Kafka messages and one library for publishing messages.
105
-
106
- ### Message Processing Frameworks
107
-
108
- * [Karafka](https://github.com/karafka/karafka) - Ruby and Rails efficient Kafka processing framework.
109
- * [Racecar](https://github.com/zendesk/racecar) - A simple framework for Kafka consumers in Ruby
110
-
111
- ### Message Publishing Libraries
112
-
113
- * [WaterDrop](https://github.com/karafka/waterdrop) – Standalone Karafka library for producing Kafka messages.
114
-
115
- ## Forking
116
-
117
- When working with `rdkafka-ruby`, it's essential to know that the underlying `librdkafka` library does not support fork-safe operations, even though it is thread-safe. Forking a process after initializing librdkafka clients can lead to unpredictable behavior due to inherited file descriptors and memory states. This limitation requires careful handling, especially in Ruby applications that rely on forking.
118
-
119
- To address this, it's highly recommended to:
120
-
121
- - Never initialize any `rdkafka-ruby` producers or consumers before forking to avoid state corruption.
122
- - Before forking, always close any open producers or consumers if you've opened any.
123
- - Use high-level libraries like [WaterDrop](https://github.com/karafka/waterdrop) and [Karafka](https://github.com/karafka/karafka/), which provide abstractions for handling librdkafka's intricacies.
124
-
125
- ## Development
126
-
127
- Contributors are encouraged to focus on enhancements that align with the core goal of the library. We appreciate contributions but will likely not accept pull requests for features that:
128
-
129
- - Implement functionalities that can be achieved using standard Ruby capabilities without changes to the underlying rdkafka-ruby bindings.
130
- - Deviate significantly from the primary aim of providing librdkafka bindings with Ruby-friendly interfaces.
131
-
132
- A Docker Compose file is included to run Kafka. To run that:
133
-
134
- ```
135
- docker-compose up
136
- ```
137
-
138
- Run `bundle` and `cd ext && bundle exec rake && cd ..` to download and compile `librdkafka`.
139
-
140
- You can then run `bundle exec rspec` to run the tests. To see rdkafka debug output:
141
-
142
- ```
143
- DEBUG_PRODUCER=true bundle exec rspec
144
- DEBUG_CONSUMER=true bundle exec rspec
145
- ```
146
-
147
- After running the tests, you can bring the cluster down to start with a clean slate:
148
-
149
- ```
150
- docker-compose down
151
- ```
152
-
153
- ## Example
154
-
155
- To see everything working, run these in separate tabs:
156
-
157
- ```
158
- bundle exec rake consume_messages
159
- bundle exec rake produce_messages
160
- ```
58
+ - They are specific to Karafka's requirements
59
+ - They cannot be generalized for upstream inclusion
60
+ - They are temporary while working on upstream integration
161
61
 
162
62
  ## Versions
163
63
 
Binary file
Binary file
Binary file
Binary file
Binary file
@@ -0,0 +1,35 @@
1
+ services:
2
+ kafka:
3
+ container_name: kafka
4
+ image: confluentinc/cp-kafka:8.0.0
5
+ ports:
6
+ - 9092:9092 # Support PLAINTEXT so we can run one docker setup for SSL and PLAINTEXT
7
+ - 9093:9093
8
+ volumes:
9
+ - ./ssl:/etc/kafka/secrets
10
+ environment:
11
+ CLUSTER_ID: kafka-docker-cluster-1
12
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
13
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
14
+ KAFKA_PROCESS_ROLES: broker,controller
15
+ KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
16
+ KAFKA_LISTENERS: PLAINTEXT://:9092,SSL://:9093,CONTROLLER://:9094
17
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT,SSL:SSL
18
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092,SSL://127.0.0.1:9093
19
+ KAFKA_BROKER_ID: 1
20
+ KAFKA_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9094
21
+ ALLOW_PLAINTEXT_LISTENER: 'yes'
22
+ KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
23
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
24
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
25
+ KAFKA_ALLOW_EVERYONE_IF_NO_ACL_FOUND: "true"
26
+ KAFKA_AUTHORIZER_CLASS_NAME: org.apache.kafka.metadata.authorizer.StandardAuthorizer
27
+
28
+ # SSL Configuration
29
+ KAFKA_SSL_KEYSTORE_FILENAME: kafka.server.keystore.jks
30
+ KAFKA_SSL_KEYSTORE_CREDENTIALS: kafka_keystore_creds
31
+ KAFKA_SSL_KEY_CREDENTIALS: kafka_ssl_key_creds
32
+ KAFKA_SSL_TRUSTSTORE_FILENAME: kafka.server.truststore.jks
33
+ KAFKA_SSL_TRUSTSTORE_CREDENTIALS: kafka_truststore_creds
34
+ KAFKA_SSL_CLIENT_AUTH: none
35
+ KAFKA_SSL_ENDPOINT_IDENTIFICATION_ALGORITHM: ""
data/ext/build_common.sh CHANGED
@@ -94,12 +94,25 @@ secure_download() {
94
94
  local url="$1"
95
95
  local filename="$2"
96
96
 
97
+ # Check if file already exists in current directory (may have been already downloaded)
97
98
  if [ -f "$filename" ]; then
98
99
  log "File $filename already exists, verifying checksum..."
99
100
  verify_checksum "$filename"
100
101
  return 0
101
102
  fi
102
103
 
104
+ # Check dist directory relative to script location
105
+ local script_dir
106
+ script_dir="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
107
+ local dist_file="$script_dir/../dist/$filename"
108
+
109
+ if [ -f "$dist_file" ]; then
110
+ log "Using distributed $filename from dist/"
111
+ cp "$dist_file" "$filename"
112
+ verify_checksum "$filename"
113
+ return 0
114
+ fi
115
+
103
116
  log "Downloading $filename from $url..."
104
117
 
105
118
  # Use platform-appropriate download command
@@ -344,7 +357,9 @@ get_zstd_url() {
344
357
  }
345
358
 
346
359
  get_krb5_url() {
347
- echo "https://kerberos.org/dist/krb5/${KRB5_VERSION%.*}/krb5-${KRB5_VERSION}.tar.gz"
360
+ # Using MIT mirror since kerberos.org is down
361
+ # echo "https://kerberos.org/dist/krb5/${KRB5_VERSION%.*}/krb5-${KRB5_VERSION}.tar.gz"
362
+ echo "https://web.mit.edu/kerberos/dist/krb5/${KRB5_VERSION%.*}/krb5-${KRB5_VERSION}.tar.gz"
348
363
  }
349
364
 
350
365
  # Export functions and variables that scripts will need
@@ -0,0 +1,326 @@
1
+ #!/usr/bin/env bash
2
+ #
3
+ # Build self-contained librdkafka.so for Linux aarch64 GNU with checksum verification
4
+ # Usage: ./build_linux_aarch64_gnu.sh
5
+ #
6
+ # Expected directory structure:
7
+ # ext/build_linux_aarch64_gnu.sh (this script)
8
+ # ext/build_common.sh (shared functions)
9
+ # dist/librdkafka-*.tar.gz (librdkafka source tarball)
10
+ # dist/patches/*.patch (optional Ruby-specific patches)
11
+ #
12
+ set -euo pipefail
13
+
14
+ # Source common functions and constants
15
+ SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
16
+ source "$SCRIPT_DIR/build_common.sh"
17
+
18
+ # Platform-specific paths
19
+ DIST_DIR="$SCRIPT_DIR/../dist"
20
+ PATCHES_DIR="$DIST_DIR/patches"
21
+ BUILD_DIR="$(pwd)/build-tmp"
22
+ DEPS_PREFIX="/tmp"
23
+
24
+ # Check common dependencies
25
+ check_common_dependencies
26
+
27
+ # Linux-specific dependency check
28
+ log "Checking Linux aarch64 GNU-specific build dependencies..."
29
+ command -v gcc &> /dev/null || error "gcc not found. Install with: apt-get install build-essential"
30
+
31
+ # Verify we're on ARM64 or cross-compiling
32
+ ARCH=$(uname -m)
33
+ if [[ "$ARCH" != "aarch64" && "$ARCH" != "arm64" ]]; then
34
+ warn "Not running on aarch64 ($ARCH detected). Ensure you have aarch64 cross-compilation tools if needed."
35
+ fi
36
+
37
+ # Auto-detect librdkafka tarball
38
+ log "Looking for librdkafka tarball in $DIST_DIR..."
39
+ LIBRDKAFKA_TARBALL=$(find_librdkafka_tarball "$DIST_DIR")
40
+ log "Found librdkafka tarball: $LIBRDKAFKA_TARBALL"
41
+
42
+ # Verify librdkafka tarball checksum if available
43
+ verify_librdkafka_checksum "$LIBRDKAFKA_TARBALL"
44
+
45
+ # Find patches
46
+ PATCHES_FOUND=()
47
+ find_patches "$PATCHES_DIR" PATCHES_FOUND
48
+
49
+ security_log "Starting secure build with checksum verification enabled"
50
+ log "Building self-contained librdkafka.so for Linux aarch64 GNU"
51
+ log "Dependencies to build:"
52
+ log " - OpenSSL: $OPENSSL_VERSION"
53
+ log " - Cyrus SASL: $CYRUS_SASL_VERSION"
54
+ log " - MIT Kerberos: $KRB5_VERSION"
55
+ log " - zlib: $ZLIB_VERSION"
56
+ log " - ZStd: $ZSTD_VERSION"
57
+ log "librdkafka source: $LIBRDKAFKA_TARBALL"
58
+ log "Build directory: $BUILD_DIR"
59
+
60
+ # Create build directory
61
+ mkdir -p "$BUILD_DIR"
62
+ cd "$BUILD_DIR"
63
+
64
+ # Build OpenSSL
65
+ log "Building OpenSSL $OPENSSL_VERSION for ARM64..."
66
+ OPENSSL_PREFIX="$DEPS_PREFIX/static-openssl-$OPENSSL_VERSION-arm64"
67
+ OPENSSL_TARBALL="openssl-$OPENSSL_VERSION.tar.gz"
68
+ OPENSSL_DIR="openssl-$OPENSSL_VERSION"
69
+
70
+ secure_download "$(get_openssl_url)" "$OPENSSL_TARBALL"
71
+ extract_if_needed "$OPENSSL_TARBALL" "$OPENSSL_DIR"
72
+ cd "$OPENSSL_DIR"
73
+
74
+ # Check if OpenSSL lib directory exists (lib or lib64)
75
+ if [ ! -f "$OPENSSL_PREFIX/lib/libssl.a" ] && [ ! -f "$OPENSSL_PREFIX/lib64/libssl.a" ]; then
76
+ log "Configuring and building OpenSSL for ARM64..."
77
+ export CFLAGS="-fPIC"
78
+ # Use linux-aarch64 target for ARM64
79
+ ./Configure linux-aarch64 \
80
+ no-shared \
81
+ no-dso \
82
+ --prefix="$OPENSSL_PREFIX"
83
+ make clean || true
84
+ make -j$(get_cpu_count)
85
+ make install
86
+ unset CFLAGS
87
+ log "OpenSSL built successfully"
88
+ else
89
+ log "OpenSSL already built, skipping..."
90
+ fi
91
+
92
+ # Determine OpenSSL lib directory
93
+ if [ -f "$OPENSSL_PREFIX/lib64/libssl.a" ]; then
94
+ OPENSSL_LIB_DIR="$OPENSSL_PREFIX/lib64"
95
+ else
96
+ OPENSSL_LIB_DIR="$OPENSSL_PREFIX/lib"
97
+ fi
98
+ log "OpenSSL libraries in: $OPENSSL_LIB_DIR"
99
+
100
+ cd "$BUILD_DIR"
101
+
102
+ # Build MIT Kerberos (krb5)
103
+ log "Building MIT Kerberos $KRB5_VERSION for ARM64..."
104
+ KRB5_PREFIX="$DEPS_PREFIX/static-krb5-$KRB5_VERSION-arm64"
105
+ KRB5_TARBALL="krb5-$KRB5_VERSION.tar.gz"
106
+ KRB5_DIR="krb5-$KRB5_VERSION"
107
+
108
+ secure_download "$(get_krb5_url)" "$KRB5_TARBALL"
109
+ extract_if_needed "$KRB5_TARBALL" "$KRB5_DIR"
110
+ cd "$KRB5_DIR/src"
111
+
112
+ if [ ! -f "$KRB5_PREFIX/lib/libgssapi_krb5.a" ]; then
113
+ log "Configuring and building MIT Kerberos for ARM64..."
114
+ make clean 2>/dev/null || true
115
+ ./configure --disable-shared --enable-static --prefix="$KRB5_PREFIX" \
116
+ --without-ldap --without-tcl --without-keyutils \
117
+ --disable-rpath --without-system-verto \
118
+ CFLAGS="-fPIC" CXXFLAGS="-fPIC"
119
+
120
+ # Build everything except the problematic kadmin tools
121
+ log "Building Kerberos (will ignore kadmin build failures)..."
122
+ make -j$(get_cpu_count) || {
123
+ log "Full build failed (expected due to kadmin), continuing with libraries..."
124
+ # The libraries should be built even if kadmin fails
125
+ true
126
+ }
127
+
128
+ # Install what was successfully built
129
+ make install || {
130
+ log "Full install failed, installing individual components..."
131
+ # Try to install the core libraries manually
132
+ make install-mkdirs 2>/dev/null || true
133
+ make -C util install 2>/dev/null || true
134
+ make -C lib install 2>/dev/null || true
135
+ make -C plugins/kdb/db2 install 2>/dev/null || true
136
+ }
137
+
138
+ # Verify we got the essential libraries
139
+ if [ ! -f "$KRB5_PREFIX/lib/libgssapi_krb5.a" ]; then
140
+ error "Failed to build essential Kerberos libraries"
141
+ fi
142
+
143
+ log "MIT Kerberos libraries built successfully"
144
+ else
145
+ log "MIT Kerberos already built, skipping..."
146
+ fi
147
+
148
+ cd "$BUILD_DIR"
149
+
150
+ # Build SASL
151
+ log "Building Cyrus SASL $CYRUS_SASL_VERSION for ARM64..."
152
+ SASL_PREFIX="$DEPS_PREFIX/static-sasl-$CYRUS_SASL_VERSION-arm64"
153
+ SASL_TARBALL="cyrus-sasl-$CYRUS_SASL_VERSION.tar.gz"
154
+ SASL_DIR="cyrus-sasl-$CYRUS_SASL_VERSION"
155
+
156
+ secure_download "$(get_sasl_url)" "$SASL_TARBALL"
157
+ extract_if_needed "$SASL_TARBALL" "$SASL_DIR"
158
+ cd "$SASL_DIR"
159
+
160
+ if [ ! -f "$SASL_PREFIX/lib/libsasl2.a" ]; then
161
+ log "Configuring and building SASL for ARM64..."
162
+ make clean 2>/dev/null || true
163
+ ./configure --disable-shared --enable-static --prefix="$SASL_PREFIX" \
164
+ --without-dblib --disable-gdbm \
165
+ --enable-gssapi="$KRB5_PREFIX" \
166
+ CFLAGS="-fPIC" CXXFLAGS="-fPIC" \
167
+ CPPFLAGS="-I$KRB5_PREFIX/include" \
168
+ LDFLAGS="-L$KRB5_PREFIX/lib"
169
+ make -j$(get_cpu_count)
170
+ make install
171
+ log "SASL built successfully"
172
+ else
173
+ log "SASL already built, skipping..."
174
+ fi
175
+
176
+ cd "$BUILD_DIR"
177
+
178
+ # Build zlib
179
+ log "Building zlib $ZLIB_VERSION for ARM64..."
180
+ ZLIB_PREFIX="$DEPS_PREFIX/static-zlib-$ZLIB_VERSION-arm64"
181
+ ZLIB_TARBALL="zlib-$ZLIB_VERSION.tar.gz"
182
+ ZLIB_DIR="zlib-$ZLIB_VERSION"
183
+
184
+ secure_download "$(get_zlib_url)" "$ZLIB_TARBALL"
185
+ extract_if_needed "$ZLIB_TARBALL" "$ZLIB_DIR"
186
+ cd "$ZLIB_DIR"
187
+
188
+ if [ ! -f "$ZLIB_PREFIX/lib/libz.a" ]; then
189
+ log "Configuring and building zlib for ARM64..."
190
+ make clean 2>/dev/null || true
191
+ export CFLAGS="-fPIC"
192
+ ./configure --prefix="$ZLIB_PREFIX" --static
193
+ make -j$(get_cpu_count)
194
+ make install
195
+ unset CFLAGS
196
+ log "zlib built successfully"
197
+ else
198
+ log "zlib already built, skipping..."
199
+ fi
200
+
201
+ cd "$BUILD_DIR"
202
+
203
+ # Build ZStd
204
+ log "Building ZStd $ZSTD_VERSION for ARM64..."
205
+ ZSTD_PREFIX="$DEPS_PREFIX/static-zstd-$ZSTD_VERSION-arm64"
206
+ ZSTD_TARBALL="zstd-$ZSTD_VERSION.tar.gz"
207
+ ZSTD_DIR="zstd-$ZSTD_VERSION"
208
+
209
+ secure_download "$(get_zstd_url)" "$ZSTD_TARBALL"
210
+ extract_if_needed "$ZSTD_TARBALL" "$ZSTD_DIR"
211
+ cd "$ZSTD_DIR"
212
+
213
+ if [ ! -f "$ZSTD_PREFIX/lib/libzstd.a" ]; then
214
+ log "Building ZStd for ARM64..."
215
+ make clean 2>/dev/null || true
216
+ make lib-mt CFLAGS="-fPIC" PREFIX="$ZSTD_PREFIX" -j$(get_cpu_count)
217
+ # Use standard install target - install-pc may not exist in all versions
218
+ make install PREFIX="$ZSTD_PREFIX"
219
+ log "ZStd built successfully"
220
+ else
221
+ log "ZStd already built, skipping..."
222
+ fi
223
+
224
+ cd "$BUILD_DIR"
225
+
226
+ # Extract and patch librdkafka
227
+ log "Extracting librdkafka..."
228
+ tar xzf "$LIBRDKAFKA_TARBALL"
229
+ cd "librdkafka-$LIBRDKAFKA_VERSION"
230
+
231
+ # Fix permissions and apply patches
232
+ fix_configure_permissions
233
+ apply_patches PATCHES_FOUND
234
+
235
+ # Configure librdkafka
236
+ log "Configuring librdkafka for ARM64..."
237
+
238
+ if [ -f configure ]; then
239
+ log "Using standard configure (autotools)"
240
+ # Export environment variables for configure to pick up
241
+ export CPPFLAGS="-I$KRB5_PREFIX/include"
242
+ export LDFLAGS="-L$KRB5_PREFIX/lib"
243
+
244
+ ./configure --enable-static --disable-shared --disable-curl \
245
+ --enable-gssapi
246
+
247
+ # Clean up environment variables
248
+ unset CPPFLAGS LDFLAGS
249
+ else
250
+ error "No configure script found (checked: configure.self, configure)"
251
+ fi
252
+
253
+ # Build librdkafka
254
+ log "Compiling librdkafka for ARM64..."
255
+ make clean || true
256
+ make -j$(get_cpu_count)
257
+
258
+ # Verify librdkafka.a exists
259
+ if [ ! -f src/librdkafka.a ]; then
260
+ error "librdkafka.a not found after build"
261
+ fi
262
+
263
+ log "librdkafka.a built successfully"
264
+
265
+ # Create self-contained shared library
266
+ log "Creating self-contained librdkafka.so for ARM64..."
267
+
268
+ # Write the export map
269
+ cat > export.map <<'EOF'
270
+ {
271
+ global:
272
+ rd_kafka_*;
273
+ local:
274
+ *;
275
+ };
276
+ EOF
277
+
278
+ # Link everything statically, expose only rd_kafka_* symbols
279
+ aarch64-linux-gnu-gcc -shared -fPIC \
280
+ -Wl,--version-script=export.map \
281
+ -Wl,--whole-archive src/librdkafka.a -Wl,--no-whole-archive \
282
+ -o librdkafka.so \
283
+ "$SASL_PREFIX/lib/libsasl2.a" \
284
+ "$KRB5_PREFIX/lib/libgssapi_krb5.a" \
285
+ "$KRB5_PREFIX/lib/libkrb5.a" \
286
+ "$KRB5_PREFIX/lib/libk5crypto.a" \
287
+ "$KRB5_PREFIX/lib/libcom_err.a" \
288
+ "$KRB5_PREFIX/lib/libkrb5support.a" \
289
+ "$OPENSSL_LIB_DIR/libssl.a" \
290
+ "$OPENSSL_LIB_DIR/libcrypto.a" \
291
+ "$ZLIB_PREFIX/lib/libz.a" \
292
+ "$ZSTD_PREFIX/lib/libzstd.a" \
293
+ -lpthread -lm -ldl -lresolv
294
+
295
+ if [ ! -f librdkafka.so ]; then
296
+ error "Failed to create librdkafka.so"
297
+ fi
298
+
299
+ log "librdkafka.so created successfully"
300
+
301
+ # Verify the build
302
+ log "Verifying build..."
303
+ file librdkafka.so
304
+
305
+ log "Checking dependencies with ldd:"
306
+ ldd librdkafka.so
307
+
308
+ log "Checking for external dependencies (should only show system libraries):"
309
+ EXTERNAL_DEPS=$(nm -D librdkafka.so | grep " U " | grep -v "@GLIBC" || true)
310
+ if [ -n "$EXTERNAL_DEPS" ]; then
311
+ error "Found external dependencies - library is not self-contained: $EXTERNAL_DEPS"
312
+ else
313
+ log "✅ No external dependencies found - library is self-contained!"
314
+ fi
315
+
316
+ # Copy to output directory
317
+ OUTPUT_DIR="$SCRIPT_DIR"
318
+ cp librdkafka.so "$OUTPUT_DIR/"
319
+ log "librdkafka.so copied to: $OUTPUT_DIR/librdkafka.so"
320
+
321
+ # Print summaries
322
+ print_security_summary
323
+ print_build_summary "Linux" "aarch64 GNU" "$OUTPUT_DIR" "librdkafka.so"
324
+
325
+ # Cleanup
326
+ cleanup_build_dir "$BUILD_DIR"
@@ -258,7 +258,18 @@ log "librdkafka.a built successfully"
258
258
  # Create self-contained shared library
259
259
  log "Creating self-contained librdkafka.so..."
260
260
 
261
- gcc -shared -fPIC -Wl,--whole-archive src/librdkafka.a -Wl,--no-whole-archive \
261
+ echo '
262
+ {
263
+ global:
264
+ rd_kafka_*;
265
+ local:
266
+ *;
267
+ };
268
+ ' > export.map
269
+
270
+ gcc -shared -fPIC \
271
+ -Wl,--version-script=export.map \
272
+ -Wl,--whole-archive src/librdkafka.a -Wl,--no-whole-archive \
262
273
  -o librdkafka.so \
263
274
  "$SASL_PREFIX/lib/libsasl2.a" \
264
275
  "$KRB5_PREFIX/lib/libgssapi_krb5.a" \
@@ -625,10 +625,20 @@ do
625
625
  fi
626
626
  done
627
627
 
628
+ echo '
629
+ {
630
+ global:
631
+ rd_kafka_*;
632
+ local:
633
+ *;
634
+ };
635
+ ' > export.map
636
+
628
637
  gcc -shared -fPIC \
629
- -Wl,--whole-archive src/librdkafka.a -Wl,--no-whole-archive \
630
- -o librdkafka.so \
631
- -Wl,-Bstatic \
638
+ -Wl,--version-script=export.map \
639
+ -Wl,--whole-archive src/librdkafka.a -Wl,--no-whole-archive \
640
+ -o librdkafka.so \
641
+ -Wl,-Bstatic \
632
642
  "$SASL_PREFIX/lib/libsasl2.a" \
633
643
  "$KRB5_PREFIX/lib/libgssapi_krb5.a" \
634
644
  "$KRB5_PREFIX/lib/libkrb5.a" \
@@ -639,11 +649,11 @@ gcc -shared -fPIC \
639
649
  "$OPENSSL_LIB_DIR/libcrypto.a" \
640
650
  "$ZLIB_PREFIX/lib/libz.a" \
641
651
  "$ZSTD_PREFIX/lib/libzstd.a" \
642
- -Wl,-Bdynamic \
643
- -lpthread -lm -ldl -lc \
644
- -static-libgcc \
645
- -Wl,--as-needed \
646
- -Wl,--no-undefined
652
+ -Wl,-Bdynamic \
653
+ -lpthread -lm -ldl -lc \
654
+ -static-libgcc \
655
+ -Wl,--as-needed \
656
+ -Wl,--no-undefined
647
657
 
648
658
  if [ ! -f librdkafka.so ]; then
649
659
  error "Failed to create librdkafka.so"