delivery_boy 1.3.1 → 2.0.0.alpha.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7248183554e5a0d0bc54b355fe18595ffa6e3cbb6cc240c13025d3d15f9f1768
- data.tar.gz: 2a58d4099652cb784853169a9fa1569c3d2c1902d1523a91d0fffad20467a235
+ metadata.gz: d8bca84f797ae282f1568a53d1542f0bca409e56eaf27c079607e191ccb8841a
+ data.tar.gz: 012ee10f7057df815147ce29bb135d3f8397f1454a9fdff95afa72194c06e884
  SHA512:
- metadata.gz: c4b00f742af460bcc6f76b0c04b54352fc370156a8a80c7a7f0a53915764f93195c804689a0782b267d42c534a2657e78c003cd0fb56cd016fd162bdf3c03622
- data.tar.gz: 86844e67ddab037b4cb69cc953f19b1e9b3576db7dd8b0815b26e86f0874994ac830f8d1ee67b14dc21e7ad272e25e0d4bdb6ab5c74a50b40a35815c2f70f5da
+ metadata.gz: 9381929d414fba52aabadc9d483c478bf1968b357db832825d57eedfd214f544cc95fdfee169ae54fb10f07f45ac485f22d3bc17198a614b649295b8341e9363
+ data.tar.gz: 598705aa9268eb0985262958ca4d2d1979d57a06bac366e99113ae302fb6935d5104053389f97236cdfb714430642a383284707b2a5ad9fbdc1615658671e371
data/.github/workflows/ci.yml CHANGED
@@ -25,11 +25,6 @@ jobs:
  ruby-version: ['2.6', '2.7', '3.0', '3.1', '3.2', '3.3', '3.4']

  steps:
- - name: Run Confluent Platform (Confluent Server)
- uses: zendesk/cp-all-in-one-action@v0.2.1
- with:
- service: broker
-
  - uses: actions/checkout@v4

  - name: Set up Ruby
@@ -38,10 +33,17 @@ jobs:
  ruby-version: ${{ matrix.ruby-version }}
  bundler-cache: true # runs 'bundle install' and caches installed gems automatically

- - name: Wait for broker to boot
- run: 'while ! nc -z localhost 9092; do echo -n "."; sleep 0.1; done'
-
  - name: Run tests
  run: bundle exec rake
- env:
- DELIVERY_BOY_BROKERS: localhost:9092
+
+ lint:
+ runs-on: ubuntu-latest
+ steps:
+ - uses: actions/checkout@v5
+
+ - name: Set up Ruby
+ uses: ruby/setup-ruby@v1
+ with:
+ ruby-version: ruby
+ bundler-cache: true
+ - run: bundle exec rake standard
data/.github/workflows/codeql.yaml ADDED
@@ -0,0 +1,19 @@
+ name: "CodeQL public repository scanning"
+
+ on:
+ push:
+ schedule:
+ - cron: "0 0 * * *"
+ pull_request_target:
+ types: [opened, synchronize, reopened]
+ workflow_dispatch:
+
+ permissions:
+ contents: read
+ security-events: write
+ actions: read
+ packages: read
+
+ jobs:
+ trigger-codeql:
+ uses: zendesk/prodsec-code-scanning/.github/workflows/codeql_advanced_shared.yml@production
data/.rspec CHANGED
@@ -1,2 +1,2 @@
- --format documentation
+ --order rand
  --color
data/.standard.yml ADDED
@@ -0,0 +1 @@
+ ruby_version: 2.6
data/CHANGELOG CHANGED
@@ -2,6 +2,9 @@

  ## Unreleased

+ * `compression_codec` in the `DeliveryBoy::Config` is now coerces its value into
+ a symbol if the value is present.
+
  ## v1.3.1

  * Remove the `Kafka::Producer` instance from thread local variable when calling
data/Gemfile CHANGED
@@ -4,5 +4,9 @@ gemspec

  gem "base64"
  gem "bigdecimal"
+ gem "debug"
+ gem "logger"
  gem "rake", "~> 13.0"
  gem "rspec", "~> 3.0"
+ gem "standard", "~> 1.51.1" if RUBY_VERSION > "3.0"
+ gem "testcontainers-kafka", github: "testcontainers/testcontainers-ruby"
data/Rakefile CHANGED
@@ -1,7 +1,12 @@
  require "bundler/setup"
  require "bundler/gem_tasks"
+ begin
+ require "standard/rake"
+ rescue LoadError
+ :noop
+ end
  require "rspec/core/rake_task"

  RSpec::Core::RakeTask.new(:spec)

- task :default => :spec
+ task default: :spec
data/SECURITY.md ADDED
@@ -0,0 +1,5 @@
+ # Security Policy
+
+ ## Reporting a Vulnerability
+
+ Please report security vulnerabilities by e-mailing: security@zendesk.com
data/delivery_boy.gemspec CHANGED
@@ -1,25 +1,25 @@
- # coding: utf-8
  lib = File.expand_path("../lib", __FILE__)
  $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
  require "delivery_boy/version"

  Gem::Specification.new do |spec|
- spec.name = "delivery_boy"
- spec.version = DeliveryBoy::VERSION
- spec.authors = ["Daniel Schierbeck"]
- spec.email = ["daniel.schierbeck@gmail.com"]
+ spec.name = "delivery_boy"
+ spec.version = DeliveryBoy::VERSION
+ spec.authors = ["Daniel Schierbeck"]
+ spec.email = ["daniel.schierbeck@gmail.com"]

- spec.summary = "A simple way to produce messages to Kafka from Ruby applications"
- spec.description = "A simple way to produce messages to Kafka from Ruby applications"
- spec.homepage = "https://github.com/zendesk/delivery_boy"
- spec.license = "Apache License Version 2.0"
+ spec.summary = "A simple way to produce messages to Kafka from Ruby applications"
+ spec.description = "A simple way to produce messages to Kafka from Ruby applications"
+ spec.homepage = "https://github.com/zendesk/delivery_boy"
+ spec.license = "Apache License Version 2.0"

- spec.files = `git ls-files -z`.split("\x0").reject do |f|
+ spec.files = `git ls-files -z`.split("\x0").reject do |f|
  f.match(%r{^(test|spec|features)/})
  end

  spec.require_paths = ["lib"]

- spec.add_dependency "ruby-kafka", "~> 1.5"
- spec.add_dependency "king_konf", "~> 1.0"
+ # spec.add_runtime_dependency "ruby-kafka", "~> 1.0"
+ spec.add_runtime_dependency "king_konf", "~> 1.0"
+ spec.add_runtime_dependency "rdkafka", "> 0.11"
  end
data/lib/delivery_boy/config.rb CHANGED
@@ -4,6 +4,33 @@ module DeliveryBoy
  class Config < KingKonf::Config
  env_prefix :delivery_boy

+ def connection_timeout_ms
+ connect_timeout * 1000
+ end
+
+ def socket_timeout_ms
+ socket_timeout * 1000
+ end
+
+ def transactional_timeout_ms
+ transactional_timeout * 1000
+ end
+
+ def isolation_level
+ transactional ? "read_uncommitted" : "read_committed"
+ end
+
+ def max_buffer_kbytesize
+ max_buffer_bytesize / 1024
+ end
+
+ def delivery_interval_ms
+ delivery_interval * 1000
+ end
+
+ def sasl_username
+ end
+
  # Basic
  list :brokers, items: :string, sep: ",", default: ["localhost:9092"]
  string :client_id, default: "delivery_boy"
@@ -27,11 +54,12 @@ module DeliveryBoy
  integer :retry_backoff, default: 1
  boolean :idempotent, default: false
  boolean :transactional, default: false
+ string :transactional_id, default: nil
  integer :transactional_timeout, default: 60

  # Compression
- integer :compression_threshold, default: 1
- string :compression_codec, default: nil
+ integer :compression_threshold, default: 1 # deprecated, not an option for RdKafka
+ string :compression_codec, default: "none"

  # SSL authentication
  string :ssl_ca_cert, default: nil
@@ -42,10 +70,12 @@ module DeliveryBoy
  boolean :ssl_ca_certs_from_system, default: false
  boolean :ssl_verify_hostname, default: true

+ # Supported: GSSAPI, PLAIN, SCRAM-SHA-256, SCRAM-SHA-512, OAUTHBEARER
+ string :sasl_mechanism, default: "GSSAPI"
  # SASL authentication
  string :sasl_gssapi_principal
  string :sasl_gssapi_keytab
- string :sasl_plain_authzid, default: ''
+ string :sasl_plain_authzid, default: ""
  string :sasl_plain_username
  string :sasl_plain_password
  string :sasl_scram_username
data/lib/delivery_boy/config_error.rb CHANGED
@@ -1,4 +1,3 @@
  module DeliveryBoy
  ConfigError = Class.new(StandardError)
  end
-
data/lib/delivery_boy/fake.rb CHANGED
@@ -1,5 +1,4 @@
  module DeliveryBoy
-
  # A fake implementation that is useful for testing.
  class Fake
  FakeMessage = Struct.new(:value, :topic, :key, :headers, :offset, :partition, :partition_key, :create_time) do
@@ -9,8 +8,8 @@ module DeliveryBoy
  end

  def initialize
- @messages = Hash.new {|h, k| h[k] = [] }
- @buffer = Hash.new {|h, k| h[k] = [] }
+ @messages = Hash.new { |h, k| h[k] = [] }
+ @buffer = Hash.new { |h, k| h[k] = [] }
  @delivery_lock = Mutex.new
  end

@@ -25,7 +24,7 @@ module DeliveryBoy
  nil
  end

- alias deliver_async! deliver
+ alias_method :deliver_async!, :deliver

  def produce(value, topic:, key: nil, headers: {}, partition: nil, partition_key: nil, create_time: Time.now)
  @delivery_lock.synchronize do
data/lib/delivery_boy/instance.rb CHANGED
@@ -1,49 +1,57 @@
  module DeliveryBoy
-
  # This class implements the actual logic of DeliveryBoy. The DeliveryBoy module
  # has a module-level singleton instance.
  class Instance
  def initialize(config, logger)
  @config = config
  @logger = logger
- @async_producer = nil
+ @handles = []
  end

  def deliver(value, topic:, **options)
- sync_producer.produce(value, topic: topic, **options)
- sync_producer.deliver_messages
- rescue
- # Make sure to clear any buffered messages if there's an error.
- clear_buffer
-
- raise
+ options_clone = options.clone
+ if options[:create_time]
+ options_clone[:timestamp] = Time.at(options[:create_time])
+ options_clone.delete(:create_time)
+ end
+
+ sync_producer
+ .produce(payload: value, topic: topic, **options_clone)
+ .wait
  end

  def deliver_async!(value, topic:, **options)
- async_producer.produce(value, topic: topic, **options)
+ options_clone = options.clone
+ if options[:create_time]
+ options_clone[:timestamp] = Time.at(options[:create_time])
+ options_clone.delete(:create_time)
+ end
+
+ async_producer
+ .produce(payload: value, topic: topic, **options_clone)
  end

  def shutdown
- sync_producer.shutdown if sync_producer?
- async_producer.shutdown if async_producer?
-
- Thread.current[:delivery_boy_sync_producer] = nil
+ sync_producer.close if sync_producer?
+ async_producer.close if async_producer?
  end

  def produce(value, topic:, **options)
- sync_producer.produce(value, topic: topic, **options)
+ handle = sync_producer.produce(payload: value, topic: topic, **options)
+ handles.push(handle)
  end

  def deliver_messages
- sync_producer.deliver_messages
+ handles.each(&:wait)
+ handles.clear
  end

  def clear_buffer
- sync_producer.clear_buffer
+ handles.clear_buffer
  end

  def buffer_size
- sync_producer.buffer_size
+ handles.size
  end

  private
@@ -53,7 +61,7 @@ module DeliveryBoy
  def sync_producer
  # We want synchronous producers to be per-thread in order to avoid problems with
  # concurrent deliveries.
- Thread.current[:delivery_boy_sync_producer] ||= kafka.producer(**producer_options)
+ Thread.current[:delivery_boy_sync_producer] ||= kafka.producer
  end

  def sync_producer?
@@ -63,12 +71,11 @@ module DeliveryBoy
  def async_producer
  # The async producer doesn't have to be per-thread, since all deliveries are
  # performed by a single background thread.
- @async_producer ||= kafka.async_producer(
- max_queue_size: config.max_queue_size,
- delivery_threshold: config.delivery_threshold,
- delivery_interval: config.delivery_interval,
- **producer_options,
- )
+ @async_producer ||= Rdkafka::Config.new({
+ "bootstrap.servers": config.brokers.join(","),
+ "queue.buffering.backpressure.threshold": config.delivery_threshold,
+ "queue.buffering.max.ms": config.delivery_interval_ms
+ }.merge(producer_options)).producer
  end

  def async_producer?
@@ -76,51 +83,58 @@ module DeliveryBoy
  end

  def kafka
- @kafka ||= Kafka.new(
- seed_brokers: config.brokers,
- client_id: config.client_id,
- logger: logger,
- connect_timeout: config.connect_timeout,
- socket_timeout: config.socket_timeout,
- ssl_ca_cert: config.ssl_ca_cert,
- ssl_ca_cert_file_path: config.ssl_ca_cert_file_path,
- ssl_client_cert: config.ssl_client_cert,
- ssl_client_cert_key: config.ssl_client_cert_key,
- ssl_client_cert_key_password: config.ssl_client_cert_key_password,
- ssl_ca_certs_from_system: config.ssl_ca_certs_from_system,
- ssl_verify_hostname: config.ssl_verify_hostname,
- sasl_gssapi_principal: config.sasl_gssapi_principal,
- sasl_gssapi_keytab: config.sasl_gssapi_keytab,
- sasl_plain_authzid: config.sasl_plain_authzid,
- sasl_plain_username: config.sasl_plain_username,
- sasl_plain_password: config.sasl_plain_password,
- sasl_scram_username: config.sasl_scram_username,
- sasl_scram_password: config.sasl_scram_password,
- sasl_scram_mechanism: config.sasl_scram_mechanism,
- sasl_over_ssl: config.sasl_over_ssl,
- sasl_oauth_token_provider: config.sasl_oauth_token_provider,
- sasl_aws_msk_iam_access_key_id: config.sasl_aws_msk_iam_access_key_id,
- sasl_aws_msk_iam_secret_key_id: config.sasl_aws_msk_iam_secret_key_id,
- sasl_aws_msk_iam_session_token: config.sasl_aws_msk_iam_session_token,
- sasl_aws_msk_iam_aws_region: config.sasl_aws_msk_iam_aws_region
- )
+ @kafka ||= Rdkafka::Config.new({
+ "bootstrap.servers": config.brokers.join(",")
+ }.merge(producer_options))
  end

  # Options for both the sync and async producers.
  def producer_options
+ if config.transactional? && config.transactional_id.nil?
+ raise "transactional_id must be set"
+ end
+
  {
- required_acks: config.required_acks,
- ack_timeout: config.ack_timeout,
- max_retries: config.max_retries,
- retry_backoff: config.retry_backoff,
- max_buffer_size: config.max_buffer_size,
- max_buffer_bytesize: config.max_buffer_bytesize,
- compression_codec: (config.compression_codec.to_sym if config.compression_codec),
- compression_threshold: config.compression_threshold,
- idempotent: config.idempotent,
- transactional: config.transactional,
- transactional_timeout: config.transactional_timeout,
+ "socket.connection.setup.timeout.ms": config.connection_timeout_ms,
+ "socket.timeout.ms": config.socket_timeout_ms,
+ "request.required.acks": config.required_acks,
+ "request.timeout.ms": config.ack_timeout,
+ "message.send.max.retries": config.max_retries,
+ "retry.backoff.ms": config.retry_backoff,
+ "queue.buffering.max.messages": config.max_buffer_size,
+ "queue.buffering.max.kbytes": config.max_buffer_bytesize,
+ "compression.codec": config.compression_codec, # values none, gzip, snappy, lz4, zstd
+ "enable.idempotence": config.idempotent,
+ "transactional.id": config.transactional_id,
+ "transaction.timeout.ms": config.transactional_timeout_ms,
+
+ # SSL options
+ "ssl.ca.pem": config.ssl_ca_cert,
+ "ssl.ca.location": config.ssl_ca_cert_file_path,
+ "ssl.certificate.pem": config.ssl_client_cert,
+ "ssl.key.pem": config.ssl_client_cert_key,
+ "ssl.key.password": config.ssl_client_cert_key_password,
+ # ssl_ca_certs_from_system: config.ssl_ca_certs_from_system, # TODO: there is no corresponding librdkafka option. check what this does
+ # ssl_verify_hostname: config.ssl_verify_hostname, # check
+ "sasl.kerberos.principal": config.sasl_gssapi_principal,
+ "sasl.kerberos.keytab": config.sasl_gssapi_keytab
+ # sasl_plain_authzid: config.sasl_plain_authzid, # no corresponding librdkafka option, check
+ # 'sasl.username': config.sasl_plain_username,
+ # 'sasl.password': config.sasl_plain_password,
+ # 'sasl.username': config.sasl_scram_username,
+ # 'sasl.passord': config.sasl_scram_password,
+ # 'sasl.mechanism': config.sasl_scram_mechanism,
+ # sasl_over_ssl: config.sasl_over_ssl, # conditional value check again
+ # sasl_oauth_token_provider: config.sasl_oauth_token_provider, # cb code
+ # sasl_aws_msk_iam_access_key_id: config.sasl_aws_msk_iam_access_key_id, # not supported
+ # sasl_aws_msk_iam_secret_key_id: config.sasl_aws_msk_iam_secret_key_id, # not supported
+ # sasl_aws_msk_iam_session_token: config.sasl_aws_msk_iam_session_token, # not supported
+ # sasl_aws_msk_iam_aws_region: config.sasl_aws_msk_iam_aws_region # not supported
  }
  end
+
+ private
+
+ attr_reader :handles
  end
  end
data/lib/delivery_boy/version.rb CHANGED
@@ -1,3 +1,3 @@
  module DeliveryBoy
- VERSION = "1.3.1"
+ VERSION = "2.0.0.alpha.1"
  end
data/lib/delivery_boy.rb CHANGED
@@ -1,15 +1,15 @@
  require "logger"
- require "kafka"
+ # require "kafka"
  require "delivery_boy/version"
  require "delivery_boy/instance"
  require "delivery_boy/fake"
  require "delivery_boy/config"
  require "delivery_boy/config_error"
  require "delivery_boy/railtie" if defined?(Rails::Railtie)
+ require "rdkafka"

  module DeliveryBoy
  class << self
-
  # Write a message to a specified Kafka topic synchronously.
  #
  # Keep in mind that the client will block until the message has been
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: delivery_boy
  version: !ruby/object:Gem::Version
- version: 1.3.1
+ version: 2.0.0.alpha.1
  platform: ruby
  authors:
  - Daniel Schierbeck
@@ -10,33 +10,33 @@ cert_chain: []
  date: 1980-01-02 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
- name: ruby-kafka
+ name: king_konf
  requirement: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.5'
+ version: '1.0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '1.5'
+ version: '1.0'
  - !ruby/object:Gem::Dependency
- name: king_konf
+ name: rdkafka
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">"
  - !ruby/object:Gem::Version
- version: '1.0'
+ version: '0.11'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - ">"
  - !ruby/object:Gem::Version
- version: '1.0'
+ version: '0.11'
  description: A simple way to produce messages to Kafka from Ruby applications
  email:
  - daniel.schierbeck@gmail.com
@@ -45,15 +45,18 @@ extensions: []
  extra_rdoc_files: []
  files:
  - ".github/workflows/ci.yml"
+ - ".github/workflows/codeql.yaml"
  - ".github/workflows/publish.yml"
  - ".github/workflows/stale.yml"
  - ".gitignore"
  - ".rspec"
+ - ".standard.yml"
  - CHANGELOG
  - Gemfile
  - LICENSE.txt
  - README.md
  - Rakefile
+ - SECURITY.md
  - bin/console
  - bin/setup
  - delivery_boy.gemspec
@@ -87,7 +90,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.6.7
+ rubygems_version: 3.6.9
  specification_version: 4
  summary: A simple way to produce messages to Kafka from Ruby applications
  test_files: []