racecar 2.0.0.beta3 → 2.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: cb8fe69f83627b7d4171bbb4559fee8b28adf069546b41c62c566ae88e13047f
-   data.tar.gz: d8df951b4b2dd42023d65af45db3e5dca5cff008b7ed16a0a021010f240eba70
+   metadata.gz: 90dddff7c52ef8440ab154be364aacd71f687e7121eaa3456e6b580c3717c294
+   data.tar.gz: '092eb0699d0e60537840cd0114497003ee32e18d611a5b386d430fdfeabd7e03'
  SHA512:
-   metadata.gz: fd6c60e893a61255ec9c5326cb05a087d8883e8bcfa369570b1f60f6e02106992fda445f29378de69a6ab79a6ae96b41ed630591e58ccdad71f965e808c479d6
-   data.tar.gz: e5b6d87edefe41262bc479d712160ebcc95ff3627ffeadd08d103d7eb030721e44e55d82895a2cffb8b5e594be211ea0f03f473eb503b2a3a02e760d5a1524d7
+   metadata.gz: cd4d053961a7f228aeda34bb01e758f9caa6356cf4fb7a3e773c14b9976c19ab30587ce23a010e31b115f30d85999bb38c120e0313b99b43cc744b71f273fb3c
+   data.tar.gz: d0d1865ef2c7adcf94e1bf9c551d7bcdc02a223ee50fbfe4e7293b89629ca7853c55c5cfe144ce879c8aa28c3b8939dcd87fc01c54d441369cfe3c4a186643f3
@@ -0,0 +1,56 @@
+ version: 2.1
+ orbs:
+   ruby: circleci/ruby@0.1.2
+
+ jobs:
+   build:
+     docker:
+       - image: circleci/ruby:2.6.3-stretch-node
+     executor: ruby/default
+     steps:
+       - checkout
+       - run:
+           name: Which bundler?
+           command: bundle -v
+       - ruby/bundle-install
+       - run: bundle exec rspec --exclude-pattern='spec/integration/*_spec.rb'
+   integration-tests:
+     docker:
+       - image: circleci/ruby:2.6.3-stretch-node
+       - image: wurstmeister/zookeeper
+       - image: wurstmeister/kafka:2.11-2.0.0
+         environment:
+           KAFKA_ADVERTISED_HOST_NAME: localhost
+           KAFKA_ADVERTISED_PORT: 9092
+           KAFKA_PORT: 9092
+           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+           KAFKA_DELETE_TOPIC_ENABLE: true
+       - image: wurstmeister/kafka:2.11-2.0.0
+         environment:
+           KAFKA_ADVERTISED_HOST_NAME: localhost
+           KAFKA_ADVERTISED_PORT: 9093
+           KAFKA_PORT: 9093
+           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+           KAFKA_DELETE_TOPIC_ENABLE: true
+       - image: wurstmeister/kafka:2.11-2.0.0
+         environment:
+           KAFKA_ADVERTISED_HOST_NAME: localhost
+           KAFKA_ADVERTISED_PORT: 9094
+           KAFKA_PORT: 9094
+           KAFKA_ZOOKEEPER_CONNECT: localhost:2181
+           KAFKA_DELETE_TOPIC_ENABLE: true
+     executor: ruby/default
+     steps:
+       - checkout
+       - run:
+           name: Which bundler?
+           command: bundle -v
+       - ruby/bundle-install
+       - run: bundle exec rspec --pattern='spec/integration/*_spec.rb'
+
+ workflows:
+   version: 2
+   test:
+     jobs:
+       - build
+       - integration-tests
@@ -1,30 +1,39 @@
  # Changelog
 
+ ## Unreleased
+
+ ## racecar v2.1.0
+
+ * Bump rdkafka to 0.8.0 (#191)
+
  ## racecar v2.0.0
 
- * Replace `ruby-kafka` with `rdkafka-ruby`
- * Removed config option `sasl_over_ssl`
- * [Racecar::Consumer] Do not pause consuming partitions on exception
- * [Racecar::Consumer] `topic`, `payload` and `key` are mandadory to method `produce`
- * [Racecar::Consumer] `process_batch` retrieves an array of messages instead of batch object
- * [Racecar::Consumer] Remove `offset_retention_time`
- * [Racecar::Consumer] Allow providing `additional_config` for subscriptions
- * [Racecar::Consumer] Provide access to `producer` and `consumer`
- * [Racecar::Consumer] Enforce delivering messages with method `deliver!`
- * [Racecar::Consumer] instead of raising when a partition EOF is reached, the result can be queried through `consumer.last_poll_read_partition_eof?`
- * [Racecar::Config] Remove `offset_retention_time`, `connect_timeout` and `offset_commit_threshold`
- * [Racecar::Config] Pass config to `rdkafka-ruby` via `producer` and `consumer`
- * [Racecar::Config] Replace `max_fetch_queue_size` with `min_message_queue_size`
- * [Racecar::Config] Add `synchronous_commits` to control blocking of `consumer.commit` (default `false`)
- * [Racecar::Config] Add `security_protocol` to control protocol between client and broker
- * [Racecar::Config] SSL configuration via `ssl_ca_location`, `ssl_crl_location`, `ssl_keystore_location` and `ssl_keystore_password`
- * [Racecar::Config] SASL configuration via `sasl_mechanism`, `sasl_kerberos_service_name`, `sasl_kerberos_principal`, `sasl_kerberos_kinit_cmd`, `sasl_kerberos_keytab`, `sasl_kerberos_min_time_before_relogin`, `sasl_username` and `sasl_password`
+ * Replace `ruby-kafka` with `rdkafka-ruby` as the low-level library underneath Racecar (#91).
+ * Fix `max_wait_time` usage (#179).
+ * Remove config option `sasl_over_ssl`.
+ * [Racecar::Consumer] Do not pause consuming partitions on exception.
+ * [Racecar::Consumer] `topic`, `payload` and `key` are mandatory arguments to the `produce` method.
+ * [Racecar::Consumer] `process_batch` receives an array of messages instead of a batch object.
+ * [Racecar::Consumer] Remove `offset_retention_time`.
+ * [Racecar::Consumer] Allow providing `additional_config` for subscriptions (see the editor's sketch at the end of this list).
+ * [Racecar::Consumer] Provide access to `producer` and `consumer`.
+ * [Racecar::Consumer] Enforce delivering messages with the `deliver!` method.
+ * [Racecar::Consumer] Instead of raising when a partition EOF is reached, the result can be queried through `consumer.last_poll_read_partition_eof?`.
+ * [Racecar::Config] Remove `offset_retention_time`, `connect_timeout` and `offset_commit_threshold`.
+ * [Racecar::Config] Pass config to `rdkafka-ruby` via `producer` and `consumer`.
+ * [Racecar::Config] Replace `max_fetch_queue_size` with `min_message_queue_size`.
+ * [Racecar::Config] Add `synchronous_commits` to control blocking of `consumer.commit` (default `false`).
+ * [Racecar::Config] Add `security_protocol` to control the protocol between client and broker.
+ * [Racecar::Config] SSL configuration via `ssl_ca_location`, `ssl_crl_location`, `ssl_keystore_location` and `ssl_keystore_password`.
+ * [Racecar::Config] SASL configuration via `sasl_mechanism`, `sasl_kerberos_service_name`, `sasl_kerberos_principal`, `sasl_kerberos_kinit_cmd`, `sasl_kerberos_keytab`, `sasl_kerberos_min_time_before_relogin`, `sasl_username` and `sasl_password`.
  * [Instrumentation] `produce_message.racecar` sent whenever a produced message is queued. Payload includes `topic`, `key`, `value` and `create_time`.
  * [Instrumentation] `acknowledged_message.racecar` sent whenever a produced message was successfully received by Kafka. Payload includes `offset` and `partition`, but no message details.
- * [Instrumentation] `rdkafka-ruby` does not yet provide instrumentation [rdkafka-ruby#54](https://github.com/appsignal/rdkafka-ruby/issues/54)
- * [Instrumentation] if processors define a `statistics_callback`, it will be called once every second for every subscription or producer connection. The first argument will be a Hash, for contents see [librdkafka STATISTICS.md](https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md)
+ * [Instrumentation] `rdkafka-ruby` does not yet provide instrumentation ([rdkafka-ruby#54](https://github.com/appsignal/rdkafka-ruby/issues/54)).
+ * [Instrumentation] If processors define a `statistics_callback`, it will be called once every second for every subscription or producer connection. The first argument will be a Hash; for its contents, see [librdkafka STATISTICS.md](https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md).
  * Add current directory to `$LOAD_PATH` only when `--require` option is used (#117).
- * Remove manual heartbeat support, see [Long-running message processing section in README](README.md#long-running-message-processing)
+ * Remove manual heartbeat support; see the [long-running message processing section in the README](README.md#long-running-message-processing).
+ * Rescue exceptions at the outermost level of `exe/racecar`, log them and pass them to `on_error`, so that exceptions raised outside `Cli.run` are not silently discarded (#186).
+ * When exceptions with a `cause` are logged, recursively log the `cause` detail, separated by `--- Caused by: ---\n`.
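
The sketch below (added by the editor, not part of the gem's changelog) illustrates two of the consumer-level items listed above: passing `additional_config` to a subscription and defining a `statistics_callback`. The consumer name, topic and the `auto.offset.reset` key are illustrative assumptions.

```ruby
# Hypothetical consumer illustrating two of the v2 consumer additions above.
class EventsConsumer < Racecar::Consumer
  # additional_config is forwarded to rdkafka for this subscription only;
  # "auto.offset.reset" is just an example of a librdkafka setting.
  subscribes_to "events", additional_config: { "auto.offset.reset" => "earliest" }

  # When defined on a processor, this is called roughly once per second with a
  # Hash of librdkafka statistics (see the STATISTICS.md link above).
  def statistics_callback(stats)
    puts "rdkafka stats for client #{stats["name"]}"
  end

  def process(message)
    puts message.value
  end
end
```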
 
  ## racecar v1.0.0
 
data/Gemfile CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  source 'https://rubygems.org'
 
  # Specify your gem's dependencies in racecar.gemspec
data/README.md CHANGED
@@ -1,5 +1,3 @@
- **IMPORTANT:** The `master` branch is unstable, working towards a v2 release that breaks a lot of stuff. Use the `v1-stable` branch if you want to suggest changes.
-
  # Racecar
 
  Racecar is a friendly and easy-to-approach Kafka consumer framework. It allows you to write small applications that process messages stored in Kafka topics while optionally integrating with your Rails models.
@@ -21,6 +19,7 @@ The framework is based on [rdkafka-ruby](https://github.com/appsignal/rdkafka-ru
     7. [Handling errors](#handling-errors)
     8. [Logging](#logging)
     9. [Operations](#operations)
+    10. [Upgrading from v1 to v2](#upgrading-from-v1-to-v2)
  3. [Development](#development)
  4. [Contributing](#contributing)
  5. [Support and Discussion](#support-and-discussion)
@@ -288,6 +287,9 @@ The memory usage limit is roughly estimated as `max_bytes * min_message_queue_si
  * `ssl_crl_location` – Path to CRL for verifying broker's certificate validity
  * `ssl_keystore_location` – Path to client's keystore (PKCS#12) used for authentication
  * `ssl_keystore_password` – Client's keystore (PKCS#12) password
+ * `ssl_certificate_location` – Path to the client certificate used for authentication
+ * `ssl_key_location` – Path to the client's private key used for authentication
+ * `ssl_key_password` – Password for the client's private key (see the sketch below)
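
The following sketch (editor-added) shows how the three new options above combine with the existing SSL settings when configuring Racecar from Ruby; `Racecar.configure`, the file paths and the environment variable are assumptions about your setup:

```ruby
# Illustrative only: paths, the password source and the protocol choice are assumptions.
Racecar.configure do |config|
  config.security_protocol        = :ssl
  config.ssl_ca_location          = "/etc/kafka/ca.pem"
  config.ssl_certificate_location = "/etc/kafka/client-cert.pem" # client certificate
  config.ssl_key_location         = "/etc/kafka/client-key.pem"  # client private key
  config.ssl_key_password         = ENV["KAFKA_SSL_KEY_PASSWORD"]
end
```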
 
  #### SASL encryption, authentication & authorization
 
@@ -464,7 +466,7 @@ Racecar.config.on_error do |exception, info|
  end
  ```
 
- It is highly recommended that you set up an error handler.
+ It is highly recommended that you set up an error handler. Please note that the `info` object contains different keys and values depending on whether you are using `process` or `process_batch`. See the `instrumentation_payload` object in the `process` and `process_batch` methods in the `Runner` class for the complete list.
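
A minimal editor-added sketch of such a handler follows; the `info` keys used here (`topic`, `partition`, `retries_count`) are assumptions based on the `Runner` payloads shown further down in this diff, and the plain `$stderr` logging is a placeholder:

```ruby
# Sketch only: which keys are present depends on process vs. process_batch.
Racecar.config.on_error do |exception, info|
  $stderr.puts "Failed on #{info[:topic]}/#{info[:partition]} " \
               "(retries: #{info[:retries_count]}): #{exception.class}: #{exception.message}"
end
```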
 
 
  ### Logging
@@ -481,6 +483,11 @@ In order to gracefully shut down a Racecar consumer process, send it the `SIGTER
  In order to introspect the configuration of a consumer process, send it the `SIGUSR1` signal. This will make Racecar print its configuration to the standard error file descriptor associated with the consumer process, so you'll need to know where that is written to.
 
 
+ ### Upgrading from v1 to v2
+
+ In order to safely upgrade from Racecar v1 to v2, you need to completely shut down your consumer group before starting it up again with the v2 Racecar dependency. In general, you should avoid rolling deploys for consumer groups, so it is likely that this will just work for you, but it's a good idea to check first.
+
+
  ## Development
 
  After checking out the repo, run `bin/setup` to install dependencies. Then, run `rspec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.
data/Rakefile CHANGED
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "bundler/gem_tasks"
  require "rspec/core/rake_task"
 
@@ -0,0 +1,32 @@
+ version: '2'
+ services:
+   zookeeper:
+     image: confluentinc/cp-zookeeper:5.5.1
+     hostname: zookeeper
+     container_name: zookeeper
+     ports:
+       - "2181:2181"
+     environment:
+       ZOOKEEPER_CLIENT_PORT: 2181
+       ZOOKEEPER_TICK_TIME: 2000
+
+   broker:
+     image: confluentinc/cp-kafka:5.5.1
+     hostname: broker
+     container_name: broker
+     depends_on:
+       - zookeeper
+     ports:
+       - "29092:29092"
+       - "9092:9092"
+       - "9101:9101"
+     environment:
+       KAFKA_BROKER_ID: 1
+       KAFKA_ZOOKEEPER_CONNECT: 'zookeeper:2181'
+       KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: PLAINTEXT:PLAINTEXT,PLAINTEXT_HOST:PLAINTEXT
+       KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://broker:29092,PLAINTEXT_HOST://localhost:9092
+       KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
+       KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
+       KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
+       KAFKA_GROUP_INITIAL_REBALANCE_DELAY_MS: 0
+       KAFKA_JMX_PORT: 9101
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  class BatchConsumer < Racecar::Consumer
    subscribes_to "messages", start_from_beginning: false
 
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  class CatConsumer < Racecar::Consumer
    subscribes_to "messages", start_from_beginning: false
 
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  class ProducingConsumer < Racecar::Consumer
    subscribes_to "messages", start_from_beginning: false
 
@@ -3,17 +3,42 @@
  require "racecar"
  require "racecar/cli"
 
- begin
-   Racecar::Cli.main(ARGV)
- rescue SignalException => e
-   # We might receive SIGTERM before our signal handler is installed.
-   if Signal.signame(e.signo) == "TERM"
-     exit(0)
-   else
-     raise
+ module Racecar
+   class << self
+     def start(argv)
+       Cli.main(argv)
+     rescue SignalException => e
+       # We might receive SIGTERM before our signal handler is installed.
+       if Signal.signame(e.signo) == "TERM"
+         exit(0)
+       else
+         raise
+       end
+     rescue SystemExit
+       raise
+     rescue Exception => e
+       $stderr.puts "=> Crashed: #{exception_with_causes(e)}\n#{e.backtrace.join("\n")}"
+
+       Racecar.config.error_handler.call(e)
+
+       exit(1)
+     else
+       exit(0)
+     end
+
+     private
+
+     def exception_with_causes(e)
+       result = +"#{e.class}: #{e}"
+       if e.cause
+         result << "\n"
+         result << "--- Caused by: ---\n"
+         result << exception_with_causes(e.cause)
+       end
+       result
+     end
    end
- rescue
-   exit(1)
- else
-   exit(0)
  end
+
+ # Start your engines!
+ Racecar.start(ARGV)
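
The small, self-contained snippet below (editor-added) mirrors the `exception_with_causes` helper above to show what the recursive cause formatting produces; the example exceptions are arbitrary:

```ruby
# Mirrors the helper above to show how nested causes are rendered.
def exception_with_causes(e)
  result = +"#{e.class}: #{e}"
  if e.cause
    result << "\n--- Caused by: ---\n"
    result << exception_with_causes(e.cause)
  end
  result
end

begin
  begin
    raise ArgumentError, "bad input"
  rescue
    raise "processing failed" # Ruby records the ArgumentError as the cause
  end
rescue => e
  puts exception_with_causes(e)
  # RuntimeError: processing failed
  # --- Caused by: ---
  # ArgumentError: bad input
end
```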
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  # only needed when ruby < 2.4 and not using active support
 
  unless {}.respond_to? :compact
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    module Generators
      class ConsumerGenerator < Rails::Generators::NamedBase
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    module Generators
      class InstallGenerator < Rails::Generators::Base
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "logger"
 
  require "racecar/instrumenter"
@@ -6,6 +8,7 @@ require "racecar/consumer"
  require "racecar/consumer_set"
  require "racecar/runner"
  require "racecar/config"
+ require "racecar/version"
  require "ensure_hash_compact"
 
  module Racecar
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "optparse"
  require "logger"
  require "fileutils"
@@ -6,8 +8,10 @@ require "racecar/daemon"
 
  module Racecar
    class Cli
-     def self.main(args)
-       new(args).run
+     class << self
+       def main(args)
+         new(args).run
+       end
      end
 
      def initialize(args)
@@ -16,10 +20,6 @@ module Racecar
        @consumer_name = args.first or raise Racecar::Error, "no consumer specified"
      end
 
-     def config
-       Racecar.config
-     end
-
      def run
        $stderr.puts "=> Starting Racecar consumer #{consumer_name}..."
 
@@ -61,18 +61,16 @@ module Racecar
        processor = consumer_class.new
 
        Racecar.run(processor)
-     rescue => e
-       $stderr.puts "=> Crashed: #{e.class}: #{e}\n#{e.backtrace.join("\n")}"
-
-       config.error_handler.call(e)
-
-       raise
      end
 
      private
 
      attr_reader :consumer_name
 
+     def config
+       Racecar.config
+     end
+
      def daemonize!
        daemon = Daemon.new(File.expand_path(config.pidfile))
 
@@ -110,13 +108,13 @@ module Racecar
        end
 
        Racecar::Config.variables.each do |variable|
-         opt_name = "--" << variable.name.to_s.gsub("_", "-")
+         opt_name = +"--#{variable.name.to_s.gsub('_', '-')}"
          opt_name << " #{variable.type.upcase}" unless variable.boolean?
 
          desc = variable.description || "N/A"
 
          if variable.default
-           desc << " (default: #{variable.default.inspect})"
+           desc += " (default: #{variable.default.inspect})"
          end
 
          opts.on(opt_name, desc) do |value|
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "king_konf"
 
  module Racecar
@@ -28,6 +30,9 @@ module Racecar
    desc "The maximum number of messages that get consumed within one batch"
    integer :fetch_messages, default: 1000
 
+   desc "Minimum number of bytes the broker responds with"
+   integer :fetch_min_bytes, default: 1
+
    desc "Automatically store offset of last message provided to application"
    boolean :synchronous_commits, default: false
 
@@ -82,6 +87,15 @@ module Racecar
    desc "Client's keystore (PKCS#12) password"
    string :ssl_keystore_password
 
+   desc "Path to the certificate used for authentication"
+   string :ssl_certificate_location
+
+   desc "Path to client's certificate used for authentication"
+   string :ssl_key_location
+
+   desc "Client's certificate password"
+   string :ssl_key_password
+
    desc "SASL mechanism to use for authentication"
    string :sasl_mechanism, allowed_values: %w{GSSAPI PLAIN SCRAM-SHA-256 SCRAM-SHA-512}
 
@@ -144,6 +158,10 @@ module Racecar
 
    attr_accessor :subscriptions, :logger
 
+   def max_wait_time_ms
+     max_wait_time * 1000
+   end
+
    def initialize(env: ENV)
      super(env: env)
      @error_handler = proc {}
@@ -180,8 +198,8 @@ module Racecar
        group_id_prefix,
 
        # MyFunnyConsumer => my-funny-consumer
-       consumer_class.name.gsub(/[a-z][A-Z]/) {|str| str[0] << "-" << str[1] }.downcase,
-     ].compact.join("")
+       consumer_class.name.gsub(/[a-z][A-Z]/) { |str| "#{str[0]}-#{str[1]}" }.downcase,
+     ].compact.join
 
      self.subscriptions = consumer_class.subscriptions
      self.max_wait_time = consumer_class.max_wait_time || self.max_wait_time
@@ -217,6 +235,9 @@ module Racecar
        "ssl.crl.location" => ssl_crl_location,
        "ssl.keystore.location" => ssl_keystore_location,
        "ssl.keystore.password" => ssl_keystore_password,
+       "ssl.certificate.location" => ssl_certificate_location,
+       "ssl.key.location" => ssl_key_location,
+       "ssl.key.password" => ssl_key_password,
        "sasl.mechanism" => sasl_mechanism,
        "sasl.kerberos.service.name" => sasl_kerberos_service_name,
        "sasl.kerberos.principal" => sasl_kerberos_principal,
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    class Consumer
      Subscription = Struct.new(:topic, :start_from_beginning, :max_bytes_per_partition, :additional_config)
@@ -54,21 +56,29 @@ module Racecar
    protected
 
    # https://github.com/appsignal/rdkafka-ruby#producing-messages
-   def produce(payload, topic:, key:, headers: nil, create_time: nil)
+   def produce(payload, topic:, key: nil, partition_key: nil, headers: nil, create_time: nil)
      @delivery_handles ||= []
      message_size = payload.respond_to?(:bytesize) ? payload.bytesize : 0
      instrumentation_payload = {
-       value: payload,
-       headers: headers,
-       key: key,
-       topic: topic,
+       value:         payload,
+       headers:       headers,
+       key:           key,
+       partition_key: partition_key,
+       topic:         topic,
        message_size: message_size,
-       create_time: Time.now,
-       buffer_size: @delivery_handles.size
+       create_time:   Time.now,
+       buffer_size:   @delivery_handles.size,
      }
 
      @instrumenter.instrument("produce_message", instrumentation_payload) do
-       @delivery_handles << @producer.produce(topic: topic, payload: payload, key: key, timestamp: create_time, headers: headers)
+       @delivery_handles << @producer.produce(
+         topic: topic,
+         payload: payload,
+         key: key,
+         partition_key: partition_key,
+         timestamp: create_time,
+         headers: headers,
+       )
      end
    end
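
For reference, a short editor-added sketch of how a consumer might call the widened `produce` signature; the class name, topics and the choice of `partition_key` are assumptions:

```ruby
# Hypothetical consumer forwarding messages using the new partition_key option.
class AuditConsumer < Racecar::Consumer
  subscribes_to "events"

  def process(message)
    # key: is now optional; partition_key: can be used to pick the partition instead.
    produce(message.value, topic: "audit-log", partition_key: message.key)
    deliver! # block until the broker has acknowledged the queued message
  end
end
```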
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    class ConsumerSet
      MAX_POLL_TRIES = 10
@@ -174,8 +176,9 @@ module Racecar
        "client.id" => @config.client_id,
        "enable.partition.eof" => false,
        "fetch.max.bytes" => @config.max_bytes,
-       "fetch.message.max.bytes" => subscription.max_bytes_per_partition,
-       "fetch.wait.max.ms" => @config.max_wait_time * 1000,
+       "message.max.bytes" => subscription.max_bytes_per_partition,
+       "fetch.min.bytes" => @config.fetch_min_bytes,
+       "fetch.wait.max.ms" => @config.max_wait_time_ms,
        "group.id" => @config.group_id,
        "heartbeat.interval.ms" => @config.heartbeat_interval * 1000,
        "max.poll.interval.ms" => @config.max_poll_interval * 1000,
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "optparse"
  require "racecar/rails_config_file_loader"
  require "racecar/daemon"
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    class Daemon
      attr_reader :pidfile
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  begin
    require "datadog/statsd"
  rescue LoadError
@@ -26,7 +28,7 @@ module Racecar
      end
 
      def host
-       @host ||= default_host
+       @host
      end
 
      def host=(host)
@@ -35,7 +37,7 @@ module Racecar
      end
 
      def port
-       @port ||= default_port
+       @port
      end
 
      def port=(port)
@@ -63,22 +65,6 @@ module Racecar
 
      private
 
-     def default_host
-       if ::Datadog::Statsd.const_defined?(:Connection)
-         ::Datadog::Statsd::Connection::DEFAULT_HOST
-       else
-         ::Datadog::Statsd::DEFAULT_HOST
-       end
-     end
-
-     def default_port
-       if ::Datadog::Statsd.const_defined?(:Connection)
-         ::Datadog::Statsd::Connection::DEFAULT_PORT
-       else
-         ::Datadog::Statsd::DEFAULT_PORT
-       end
-     end
-
      def clear
        @statsd && @statsd.close
        @statsd = nil
@@ -126,6 +112,8 @@ module Racecar
      def process_batch(event)
        offset = event.payload.fetch(:last_offset)
        messages = event.payload.fetch(:message_count)
+       last_create_time = event.payload.fetch(:last_create_time)
+       time_lag = last_create_time && ((Time.now - last_create_time) * 1000).to_i
        tags = default_tags(event)
 
        if event.payload.key?(:exception)
@@ -135,7 +123,12 @@ module Racecar
          count("consumer.messages", messages, tags: tags)
        end
 
+       histogram("consumer.batch_size", messages, tags: tags)
        gauge("consumer.offset", offset, tags: tags)
+
+       if time_lag
+         gauge("consumer.time_lag", time_lag, tags: tags)
+       end
      end
 
      def join_group(event)
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    ##
    # Common API for instrumentation to standardize
@@ -11,6 +13,8 @@ module Racecar
      @default_payload = default_payload
 
      @backend = if defined?(ActiveSupport::Notifications)
+       # ActiveSupport needs `concurrent-ruby` but doesn't `require` it.
+       require 'concurrent/utility/monotonic_time'
        ActiveSupport::Notifications
      else
        NullInstrumenter
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "forwardable"
 
  module Racecar
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    # Ignores all instrumentation events.
    class NullInstrumenter
@@ -1,8 +1,12 @@
+ # frozen_string_literal: true
+
  module Racecar
    class Pause
+     attr_reader :pauses_count
+
      def initialize(timeout: nil, max_timeout: nil, exponential_backoff: false)
        @started_at = nil
-       @pauses = 0
+       @pauses_count = 0
        @timeout = timeout
        @max_timeout = max_timeout
        @exponential_backoff = exponential_backoff
@@ -11,7 +15,7 @@ module Racecar
      def pause!
        @started_at = Time.now
        @ends_at = @started_at + backoff_interval unless @timeout.nil?
-       @pauses += 1
+       @pauses_count += 1
      end
 
      def resume!
@@ -38,13 +42,13 @@ module Racecar
      end
 
      def reset!
-       @pauses = 0
+       @pauses_count = 0
      end
 
      def backoff_interval
        return Float::INFINITY if @timeout.nil?
 
-       backoff_factor = @exponential_backoff ? 2**@pauses : 1
+       backoff_factor = @exponential_backoff ? 2**@pauses_count : 1
        timeout = backoff_factor * @timeout
 
        timeout = @max_timeout if @max_timeout && timeout > @max_timeout
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
    module RailsConfigFileLoader
      def self.load!
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  require "rdkafka"
  require "racecar/pause"
  require "racecar/message"
@@ -65,12 +67,12 @@ module Racecar
        @instrumenter.instrument("main_loop", instrumentation_payload) do
          case process_method
          when :batch then
-           msg_per_part = consumer.batch_poll(config.max_wait_time).group_by(&:partition)
+           msg_per_part = consumer.batch_poll(config.max_wait_time_ms).group_by(&:partition)
            msg_per_part.each_value do |messages|
              process_batch(messages)
            end
          when :single then
-           message = consumer.poll(config.max_wait_time)
+           message = consumer.poll(config.max_wait_time_ms)
            process(message) if message
          end
        end
@@ -166,11 +168,17 @@ module Racecar
      }
 
      @instrumenter.instrument("start_process_message", instrumentation_payload)
-     @instrumenter.instrument("process_message", instrumentation_payload) do
-       with_pause(message.topic, message.partition, message.offset..message.offset) do
-         processor.process(Racecar::Message.new(message))
-         processor.deliver!
-         consumer.store_offset(message)
+     with_pause(message.topic, message.partition, message.offset..message.offset) do |pause|
+       begin
+         @instrumenter.instrument("process_message", instrumentation_payload) do
+           processor.process(Racecar::Message.new(message))
+           processor.deliver!
+           consumer.store_offset(message)
+         end
+       rescue => e
+         instrumentation_payload[:retries_count] = pause.pauses_count
+         config.error_handler.call(e, instrumentation_payload)
+         raise e
        end
      end
    end
@@ -183,31 +191,38 @@ module Racecar
        partition: first.partition,
        first_offset: first.offset,
        last_offset: last.offset,
+       last_create_time: last.timestamp,
        message_count: messages.size
      }
 
      @instrumenter.instrument("start_process_batch", instrumentation_payload)
      @instrumenter.instrument("process_batch", instrumentation_payload) do
-       with_pause(first.topic, first.partition, first.offset..last.offset) do
-         processor.process_batch(messages.map {|message| Racecar::Message.new(message) })
-         processor.deliver!
-         consumer.store_offset(messages.last)
+       with_pause(first.topic, first.partition, first.offset..last.offset) do |pause|
+         begin
+           processor.process_batch(messages.map {|message| Racecar::Message.new(message) })
+           processor.deliver!
+           consumer.store_offset(messages.last)
+         rescue => e
+           instrumentation_payload[:retries_count] = pause.pauses_count
+           config.error_handler.call(e, instrumentation_payload)
+           raise e
+         end
        end
      end
    end
 
    def with_pause(topic, partition, offsets)
-     return yield if config.pause_timeout == 0
+     pause = pauses[topic][partition]
+     return yield pause if config.pause_timeout == 0
 
      begin
-       yield
+       yield pause
        # We've successfully processed a batch from the partition, so we can clear the pause.
        pauses[topic][partition].reset!
      rescue => e
        desc = "#{topic}/#{partition}"
        logger.error "Failed to process #{desc} at #{offsets}: #{e}"
 
-       pause = pauses[topic][partition]
        logger.warn "Pausing partition #{desc} for #{pause.backoff_interval} seconds"
        consumer.pause(topic, partition, offsets.first)
        pause.pause!
@@ -1,3 +1,5 @@
+ # frozen_string_literal: true
+
  module Racecar
-   VERSION = "2.0.0.beta3"
+   VERSION = "2.1.0"
  end
@@ -21,12 +21,13 @@ Gem::Specification.new do |spec|
    spec.require_paths = ["lib"]
 
    spec.add_runtime_dependency "king_konf", "~> 0.3.7"
-   spec.add_runtime_dependency "rdkafka", "~> 0.6.0"
+   spec.add_runtime_dependency "rdkafka", "~> 0.8.0"
 
    spec.add_development_dependency "bundler", [">= 1.13", "< 3"]
+   spec.add_development_dependency "pry"
    spec.add_development_dependency "rake", "> 10.0"
    spec.add_development_dependency "rspec", "~> 3.0"
    spec.add_development_dependency "timecop"
-   spec.add_development_dependency "dogstatsd-ruby", ">= 3.0.0", "< 5.0.0"
+   spec.add_development_dependency "dogstatsd-ruby", ">= 4.0.0", "< 5.0.0"
    spec.add_development_dependency "activesupport", ">= 4.0", "< 6.1"
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: racecar
  version: !ruby/object:Gem::Version
-   version: 2.0.0.beta3
+   version: 2.1.0
  platform: ruby
  authors:
  - Daniel Schierbeck
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2020-02-06 00:00:00.000000000 Z
+ date: 2020-09-30 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: king_konf
@@ -31,14 +31,14 @@ dependencies:
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 0.6.0
+         version: 0.8.0
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: 0.6.0
+         version: 0.8.0
  - !ruby/object:Gem::Dependency
    name: bundler
    requirement: !ruby/object:Gem::Requirement
@@ -59,6 +59,20 @@ dependencies:
      - - "<"
        - !ruby/object:Gem::Version
          version: '3'
+ - !ruby/object:Gem::Dependency
+   name: pry
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: rake
    requirement: !ruby/object:Gem::Requirement
@@ -107,7 +121,7 @@ dependencies:
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: 3.0.0
+         version: 4.0.0
      - - "<"
        - !ruby/object:Gem::Version
          version: 5.0.0
@@ -117,7 +131,7 @@ dependencies:
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: 3.0.0
+         version: 4.0.0
      - - "<"
        - !ruby/object:Gem::Version
          version: 5.0.0
@@ -151,6 +165,7 @@ executables:
  extensions: []
  extra_rdoc_files: []
  files:
+ - ".circleci/config.yml"
  - ".github/workflows/rspec.yml"
  - ".gitignore"
  - ".rspec"
@@ -162,6 +177,7 @@ files:
  - Rakefile
  - bin/console
  - bin/setup
+ - docker-compose.yml
  - examples/batch_consumer.rb
  - examples/cat_consumer.rb
  - examples/producing_consumer.rb
@@ -203,12 +219,11 @@ required_ruby_version: !ruby/object:Gem::Requirement
        version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ">"
+   - - ">="
      - !ruby/object:Gem::Version
-       version: 1.3.1
+       version: '0'
  requirements: []
- rubyforge_project:
- rubygems_version: 2.7.6
+ rubygems_version: 3.1.2
  signing_key:
  specification_version: 4
  summary: A framework for running Kafka consumers