phobos 2.0.2 → 2.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 77558904b4cc06321149de51f2b13edeb97704a93128749214ba8454ff997db3
-  data.tar.gz: c88ded6d626d55f56eef5645147f102867097bbc5d32c8c4d3604a24b53f8d57
+  metadata.gz: ec66514784d6b38c90a4ca0aaf063e001536d2680732c4afa535b49c2a55cb9c
+  data.tar.gz: 1d988dd76e39305cfb30d3dd8e8fd926065631e391ed077d7c0459e6760587b4
 SHA512:
-  metadata.gz: e2c541adb24d1a42be9c99958508ebc143c078c3294df9245094c954de8c5d240be89fdf8a1afcf05ed3ad556290b2a7928b5722b4e1b56a25b2095f9e3d2b69
-  data.tar.gz: 49af36a25cc903e7bcc87e6a7a453dfd32127111ce99896b77bf31f50c1967cc8550b9e3740d47d42febaae44ffa92c7dd5e3959c96a0b637bcc5bb20ddf0c66
+  metadata.gz: 07a92fdf61d970a61f98cfb8615568fa48af0550fb473458b6dce67b479b684ce8e280dbd97b20b5c16f84bf14fa18eea47f8bae490c6ac5712b8d21d2c4d037
+  data.tar.gz: 59bb2c58cfd017b8f9462eb431d2bb62322bdc817a1260816648481abff4ff0a5d6f1df5430f3888a0bad3cb92eeb11ee2ace3ced62df1e8ee02756bb1e8a795
data/.gitignore CHANGED
@@ -13,3 +13,4 @@ config/*.yml
 log/*.log
 .byebug_history
 .idea
+*.gem
data/CHANGELOG.md CHANGED
@@ -6,6 +6,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 ``
 ## UNRELEASED
 
+## [2.1.0] - 2021-05-27
+
+- Modify config to allow specifying kafka connection information separately for consumers and producers
+
 ## [2.0.2] - 2021-01-28
 - Additional fixes for Ruby 3.0
 
data/README.md CHANGED
@@ -404,9 +404,12 @@ All [options supported by `ruby-kafka`][ruby-kafka-client] can be provided.
 __producer__ provides configurations for all producers created over the application,
 the options are the same for regular and async producers.
 All [options supported by `ruby-kafka`][ruby-kafka-producer] can be provided.
+If the __kafka__ key is present under __producer__, it is merged into the top-level __kafka__, allowing different connection configuration for producers.
 
 __consumer__ provides configurations for all consumer groups created over the application.
 All [options supported by `ruby-kafka`][ruby-kafka-consumer] can be provided.
+If the __kafka__ key is present under __consumer__, it is merged into the top-level __kafka__, allowing different connection configuration for consumers.
+
 
 __backoff__ Phobos provides automatic retries for your handlers. If an exception
 is raised, the listener will retry following the back off configured here.
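
A minimal sketch of the merge described in the README change above: values from the role-specific __kafka__ block win over the top-level __kafka__ settings, and anything not overridden is inherited. The concrete keys and values below (`seed_brokers`, `connect_timeout: 15`) are illustrative, not taken from the diff.

```ruby
# Hypothetical illustration of the per-role merge, assuming a top-level kafka
# section plus a producer-specific override in phobos.yml.
top_level_kafka   = { seed_brokers: ['localhost:9092'], connect_timeout: 15 }
producer_override = { connect_timeout: 120 } # from producer.kafka

effective_producer_kafka = top_level_kafka.merge(producer_override)
# => { seed_brokers: ["localhost:9092"], connect_timeout: 120 }
```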
@@ -582,8 +585,11 @@ After checking out the repo:
 * make sure `docker` is installed and running (for windows and mac this also includes `docker-compose`).
 * Linux: make sure `docker-compose` is installed and running.
 * run `bin/setup` to install dependencies
-* run `docker-compose up` to start the required kafka containers in a window
-* run `rspec` to run the tests in another window
+* run `docker-compose up -d --force-recreate kafka zookeeper` to start the required kafka containers
+* run tests to confirm no environmental issues
+* wait a few seconds for kafka broker to get set up - `sleep 30`
+* run `docker-compose run --rm test`
+* make sure it reports `X examples, 0 failures`
 
 You can also run `bin/console` for an interactive prompt that will allow you to experiment.
 
@@ -65,6 +65,10 @@ producer:
   # that you need to manually call sync_producer_shutdown before exiting,
   # similar to async_producer_shutdown.
   persistent_connections: false
+  # kafka here supports the same parameters as the top-level, allowing custom connection
+  # configuration details for producers
+  kafka:
+    connect_timeout: 120
 
 consumer:
   # number of seconds after which, if a client hasn't contacted the Kafka cluster,
@@ -79,6 +83,10 @@ consumer:
   offset_retention_time:
   # interval between heartbeats; must be less than the session window
   heartbeat_interval: 10
+  # kafka here supports the same parameters as the top-level, allowing custom connection
+  # configuration details for consumers
+  kafka:
+    connect_timeout: 130
 
 backoff:
   min_ms: 1000
data/lib/phobos.rb CHANGED
@@ -55,8 +55,12 @@ module Phobos
 
     def configure(configuration)
       @config = fetch_configuration(configuration)
-      @config.class.send(:define_method, :producer_hash) { Phobos.config.producer&.to_hash }
-      @config.class.send(:define_method, :consumer_hash) { Phobos.config.consumer&.to_hash }
+      @config.class.send(:define_method, :producer_hash) do
+        Phobos.config.producer&.to_hash&.except(:kafka)
+      end
+      @config.class.send(:define_method, :consumer_hash) do
+        Phobos.config.consumer&.to_hash&.except(:kafka)
+      end
       @config.listeners ||= []
       configure_logger
     end
@@ -66,8 +70,14 @@ module Phobos
       @config.listeners += listeners_config.listeners
     end
 
-    def create_kafka_client
-      Kafka.new(**config.kafka.to_hash.merge(logger: @ruby_kafka_logger))
+    def create_kafka_client(config_key = nil)
+      kafka_config = config.kafka.to_hash.merge(logger: @ruby_kafka_logger)
+
+      if config_key
+        kafka_config = kafka_config.merge(**config.send(config_key)&.kafka&.to_hash || {})
+      end
+
+      Kafka.new(**kafka_config)
     end
 
     def create_exponential_backoff(backoff_config = nil)
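
A short usage sketch of the new optional `config_key` argument. The configuration path is an assumption; the `:consumer` and `:producer` keys match the calls introduced elsewhere in this diff.

```ruby
require 'phobos'

# Assumes a phobos.yml along the lines of the configuration example further up.
Phobos.configure('config/phobos.yml')

default_client  = Phobos.create_kafka_client             # top-level kafka only
consumer_client = Phobos.create_kafka_client(:consumer)  # merged with consumer.kafka
producer_client = Phobos.create_kafka_client(:producer)  # merged with producer.kafka
```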
@@ -35,7 +35,7 @@ module Phobos
       )
       @encoding = Encoding.const_get(force_encoding.to_sym) if force_encoding
       @message_processing_opts = compact(min_bytes: min_bytes, max_wait_time: max_wait_time)
-      @kafka_client = Phobos.create_kafka_client
+      @kafka_client = Phobos.create_kafka_client(:consumer)
       @producer_enabled = @handler_class.ancestors.include?(Phobos::Producer)
     end
     # rubocop:enable Metrics/MethodLength
@@ -77,7 +77,7 @@ module Phobos
       end
 
       def create_sync_producer
-        client = kafka_client || configure_kafka_client(Phobos.create_kafka_client)
+        client = kafka_client || configure_kafka_client(Phobos.create_kafka_client(:producer))
         sync_producer = client.producer(**regular_configs)
         if Phobos.config.producer_hash[:persistent_connections]
           producer_store[:sync_producer] = sync_producer
@@ -108,7 +108,7 @@ module Phobos
       end
 
       def create_async_producer
-        client = kafka_client || configure_kafka_client(Phobos.create_kafka_client)
+        client = kafka_client || configure_kafka_client(Phobos.create_kafka_client(:producer))
         async_producer = client.async_producer(**async_configs)
         producer_store[:async_producer] = async_producer
       end
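
With these changes, producer code does not need to change: the client built behind the scenes now comes from `Phobos.create_kafka_client(:producer)`, so any `producer.kafka` overrides apply automatically. A hedged sketch of typical producer usage, with the class name, topic, and payload purely illustrative:

```ruby
class MyProducer
  include Phobos::Producer
end

# The sync producer created under the hood now uses the merged producer
# connection configuration (top-level kafka plus producer.kafka).
MyProducer.new.producer.publish(topic: 'test', payload: 'message-payload')
```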
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Phobos
-  VERSION = '2.0.2'
+  VERSION = '2.1.0'
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: phobos
 version: !ruby/object:Gem::Version
-  version: 2.0.2
+  version: 2.1.0
 platform: ruby
 authors:
 - Túlio Ornelas
@@ -15,7 +15,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-01-28 00:00:00.000000000 Z
+date: 2021-05-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -314,7 +314,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.2.3
+rubygems_version: 3.2.16
 signing_key:
 specification_version: 4
 summary: Simplifying Kafka for ruby apps