karafka 1.2.8 → 1.4.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (113)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/.coditsu/ci.yml +3 -0
  5. data/.console_irbrc +1 -3
  6. data/.diffend.yml +3 -0
  7. data/.github/FUNDING.yml +3 -0
  8. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  9. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  10. data/.github/workflows/ci.yml +52 -0
  11. data/.gitignore +1 -0
  12. data/.ruby-version +1 -1
  13. data/CHANGELOG.md +134 -14
  14. data/CODE_OF_CONDUCT.md +1 -1
  15. data/CONTRIBUTING.md +1 -1
  16. data/Gemfile +4 -5
  17. data/Gemfile.lock +92 -81
  18. data/README.md +9 -12
  19. data/bin/karafka +1 -1
  20. data/certs/mensfeld.pem +25 -0
  21. data/config/errors.yml +38 -5
  22. data/docker-compose.yml +17 -0
  23. data/karafka.gemspec +18 -17
  24. data/lib/karafka.rb +10 -16
  25. data/lib/karafka/app.rb +14 -6
  26. data/lib/karafka/attributes_map.rb +5 -10
  27. data/lib/karafka/base_consumer.rb +19 -30
  28. data/lib/karafka/base_responder.rb +45 -27
  29. data/lib/karafka/cli.rb +2 -2
  30. data/lib/karafka/cli/console.rb +11 -9
  31. data/lib/karafka/cli/flow.rb +9 -7
  32. data/lib/karafka/cli/info.rb +4 -2
  33. data/lib/karafka/cli/install.rb +30 -6
  34. data/lib/karafka/cli/server.rb +11 -6
  35. data/lib/karafka/code_reloader.rb +67 -0
  36. data/lib/karafka/connection/api_adapter.rb +22 -9
  37. data/lib/karafka/connection/batch_delegator.rb +55 -0
  38. data/lib/karafka/connection/builder.rb +5 -3
  39. data/lib/karafka/connection/client.rb +31 -31
  40. data/lib/karafka/connection/listener.rb +26 -15
  41. data/lib/karafka/connection/message_delegator.rb +36 -0
  42. data/lib/karafka/consumers/batch_metadata.rb +10 -0
  43. data/lib/karafka/consumers/callbacks.rb +32 -15
  44. data/lib/karafka/consumers/includer.rb +31 -18
  45. data/lib/karafka/consumers/responders.rb +2 -2
  46. data/lib/karafka/contracts.rb +10 -0
  47. data/lib/karafka/contracts/config.rb +21 -0
  48. data/lib/karafka/contracts/consumer_group.rb +206 -0
  49. data/lib/karafka/contracts/consumer_group_topic.rb +19 -0
  50. data/lib/karafka/contracts/responder_usage.rb +54 -0
  51. data/lib/karafka/contracts/server_cli_options.rb +31 -0
  52. data/lib/karafka/errors.rb +17 -16
  53. data/lib/karafka/fetcher.rb +28 -30
  54. data/lib/karafka/helpers/class_matcher.rb +12 -2
  55. data/lib/karafka/helpers/config_retriever.rb +1 -1
  56. data/lib/karafka/helpers/inflector.rb +26 -0
  57. data/lib/karafka/helpers/multi_delegator.rb +0 -1
  58. data/lib/karafka/instrumentation/logger.rb +9 -6
  59. data/lib/karafka/instrumentation/monitor.rb +15 -9
  60. data/lib/karafka/instrumentation/proctitle_listener.rb +36 -0
  61. data/lib/karafka/instrumentation/stdout_listener.rb +140 -0
  62. data/lib/karafka/params/batch_metadata.rb +26 -0
  63. data/lib/karafka/params/builders/batch_metadata.rb +30 -0
  64. data/lib/karafka/params/builders/params.rb +38 -0
  65. data/lib/karafka/params/builders/params_batch.rb +25 -0
  66. data/lib/karafka/params/metadata.rb +20 -0
  67. data/lib/karafka/params/params.rb +54 -0
  68. data/lib/karafka/params/params_batch.rb +35 -21
  69. data/lib/karafka/patches/ruby_kafka.rb +21 -8
  70. data/lib/karafka/persistence/client.rb +15 -11
  71. data/lib/karafka/persistence/{consumer.rb → consumers.rb} +20 -13
  72. data/lib/karafka/persistence/topics.rb +48 -0
  73. data/lib/karafka/process.rb +0 -2
  74. data/lib/karafka/responders/builder.rb +1 -1
  75. data/lib/karafka/responders/topic.rb +6 -8
  76. data/lib/karafka/routing/builder.rb +36 -8
  77. data/lib/karafka/routing/consumer_group.rb +1 -1
  78. data/lib/karafka/routing/consumer_mapper.rb +9 -9
  79. data/lib/karafka/routing/proxy.rb +10 -1
  80. data/lib/karafka/routing/topic.rb +5 -3
  81. data/lib/karafka/routing/topic_mapper.rb +16 -18
  82. data/lib/karafka/serialization/json/deserializer.rb +27 -0
  83. data/lib/karafka/serialization/json/serializer.rb +31 -0
  84. data/lib/karafka/server.rb +29 -28
  85. data/lib/karafka/setup/config.rb +67 -37
  86. data/lib/karafka/setup/configurators/water_drop.rb +7 -3
  87. data/lib/karafka/setup/dsl.rb +0 -1
  88. data/lib/karafka/status.rb +7 -3
  89. data/lib/karafka/templates/{application_consumer.rb.example → application_consumer.rb.erb} +2 -1
  90. data/lib/karafka/templates/{application_responder.rb.example → application_responder.rb.erb} +0 -0
  91. data/lib/karafka/templates/karafka.rb.erb +92 -0
  92. data/lib/karafka/version.rb +1 -1
  93. metadata +94 -72
  94. metadata.gz.sig +0 -0
  95. data/.travis.yml +0 -21
  96. data/lib/karafka/callbacks.rb +0 -30
  97. data/lib/karafka/callbacks/config.rb +0 -22
  98. data/lib/karafka/callbacks/dsl.rb +0 -16
  99. data/lib/karafka/connection/delegator.rb +0 -46
  100. data/lib/karafka/instrumentation/listener.rb +0 -112
  101. data/lib/karafka/loader.rb +0 -28
  102. data/lib/karafka/params/dsl.rb +0 -156
  103. data/lib/karafka/parsers/json.rb +0 -38
  104. data/lib/karafka/patches/dry_configurable.rb +0 -35
  105. data/lib/karafka/persistence/topic.rb +0 -29
  106. data/lib/karafka/schemas/config.rb +0 -24
  107. data/lib/karafka/schemas/consumer_group.rb +0 -78
  108. data/lib/karafka/schemas/consumer_group_topic.rb +0 -18
  109. data/lib/karafka/schemas/responder_usage.rb +0 -39
  110. data/lib/karafka/schemas/server_cli_options.rb +0 -43
  111. data/lib/karafka/setup/configurators/base.rb +0 -29
  112. data/lib/karafka/setup/configurators/params.rb +0 -25
  113. data/lib/karafka/templates/karafka.rb.example +0 -54

data/lib/karafka/app.rb
@@ -4,28 +4,36 @@ module Karafka
   # App class
   class App
     extend Setup::Dsl
-    extend Callbacks::Dsl
 
     class << self
       # Sets up all the internal components and bootstrap whole app
       # We need to know details about consumers in order to setup components,
       # that's why we don't setup them after std setup is done
-      # @raise [Karafka::Errors::InvalidConfiguration] raised when configuration
-      #   doesn't match with ConfigurationSchema
+      # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
+      #   doesn't match with the config contract
       def boot!
+        initialize!
         Setup::Config.validate!
         Setup::Config.setup_components
-        Callbacks.after_init(Karafka::App.config)
+        initialized!
       end
 
       # @return [Karafka::Routing::Builder] consumers builder instance
       def consumer_groups
-        Routing::Builder.instance
+        config.internal.routing_builder
+      end
+
+      # Triggers reload of all cached Karafka app components, so we can use in-process
+      # in-development hot code reloading without Karafka process restart
+      def reload
+        Karafka::Persistence::Consumers.clear
+        Karafka::Persistence::Topics.clear
+        config.internal.routing_builder.reload
       end
 
       Status.instance_methods(false).each do |delegated|
         define_method(delegated) do
-          Status.instance.send(delegated)
+          App.config.internal.status.send(delegated)
         end
       end
 
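The new `reload` above is the hook behind this release's in-process code reloading (see `data/lib/karafka/code_reloader.rb` in the files list): dropping the consumer and topic caches and rebuilding the routes makes the next fetch loop pick up redefined classes. A minimal sketch of wiring it up by hand, assuming a development environment and the `connection.listener.fetch_loop` instrumentation event; treat the subscription details as illustrative rather than the template's exact wiring:

    # karafka.rb (sketch) - clear cached consumers/topics before each fetch loop
    # so that code changes are picked up without a Karafka process restart
    if Karafka.env.development?
      Karafka.monitor.subscribe('connection.listener.fetch_loop') do |_event|
        Karafka::App.reload
      end
    end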

data/lib/karafka/attributes_map.rb
@@ -11,9 +11,9 @@ module Karafka
   module AttributesMap
     class << self
       # What settings should go where in ruby-kafka
+      # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
       # @note All other settings will be passed to Kafka.new method invocation.
       #   All elements in this hash are just edge cases
-      # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
       def api_adapter
         {
           consumer: %i[
@@ -22,7 +22,7 @@ module Karafka
           ],
           subscribe: %i[start_from_beginning max_bytes_per_partition],
           consumption: %i[min_bytes max_bytes max_wait_time],
-          pause: %i[pause_timeout],
+          pause: %i[pause_timeout pause_max_timeout pause_exponential_backoff],
           # All the options that are under kafka config namespace, but are not used
           # directly with kafka api, but from the Karafka user perspective, they are
           # still related to kafka. They should not be proxied anywhere
@@ -35,10 +35,9 @@ module Karafka
         (api_adapter[:subscribe] + %i[
           backend
           name
-          parser
+          deserializer
           responder
           batch_consuming
-          persistent
         ]).uniq
       end
 
@@ -53,12 +52,8 @@ module Karafka
         ignored_settings = api_adapter[:subscribe]
         defined_settings = api_adapter.values.flatten
         karafka_settings = %i[batch_fetching]
-        # This is a drity and bad hack of dry-configurable to get keys before setting values
-        dynamically_proxied = Karafka::Setup::Config
-                              ._settings
-                              .find { |s| s.name == :kafka }
-                              .value
-                              .instance_variable_get('@klass').settings
+
+        dynamically_proxied = Karafka::Setup::Config.config.kafka.to_h.keys
 
         (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
       end
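Alongside the new pause options, the rewrite above replaces private dry-configurable internals (`_settings`, `instance_variable_get`) with the public `Config.config.kafka.to_h.keys`. To make the role of `api_adapter` concrete, here is a hypothetical slicing of a kafka settings hash into per-call arguments the way the adapter sections route them (the `settings` values are made up):

    settings = {
      start_from_beginning: true,
      max_bytes_per_partition: 1_048_576,
      min_bytes: 1,
      max_wait_time: 1
    }

    subscribe_args   = settings.slice(*Karafka::AttributesMap.api_adapter[:subscribe])
    consumption_args = settings.slice(*Karafka::AttributesMap.api_adapter[:consumption])

    subscribe_args   #=> { start_from_beginning: true, max_bytes_per_partition: 1048576 }
    consumption_args #=> { min_bytes: 1, max_wait_time: 1 }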

data/lib/karafka/base_consumer.rb
@@ -4,41 +4,33 @@
 module Karafka
   # Base consumer from which all Karafka consumers should inherit
   class BaseConsumer
-    extend ActiveSupport::DescendantsTracker
     extend Forwardable
 
     # Allows us to mark messages as consumed for non-automatic mode without having
     # to use consumer client directly. We do this that way, because most of the people should not
     # mess with the client instance directly (just in case)
-    def_delegator :client, :mark_as_consumed
-
-    private :mark_as_consumed
-
-    class << self
-      attr_reader :topic
-
-      # Assigns a topic to a consumer and builds up proper consumer functionalities
-      #   so that it can cooperate with the topic settings
-      # @param topic [Karafka::Routing::Topic]
-      # @return [Karafka::Routing::Topic] assigned topic
-      def topic=(topic)
-        @topic = topic
-        Consumers::Includer.call(self)
-      end
+    %i[
+      mark_as_consumed
+      mark_as_consumed!
+      trigger_heartbeat
+      trigger_heartbeat!
+    ].each do |delegated_method_name|
+      def_delegator :client, delegated_method_name
+
+      private delegated_method_name
     end
 
     # @return [Karafka::Routing::Topic] topic to which a given consumer is subscribed
-    def topic
-      self.class.topic
-    end
-
-    # Creates lazy loaded params batch object
-    # @note Until first params usage, it won't parse data at all
-    # @param messages [Array<Kafka::FetchedMessage>, Array<Hash>] messages with raw
-    #   content (from Kafka) or messages inside a hash (from backend, etc)
-    # @return [Karafka::Params::ParamsBatch] lazy loaded params batch
-    def params_batch=(messages)
-      @params_batch = Karafka::Params::ParamsBatch.new(messages, topic.parser)
+    attr_reader :topic
+    # @return [Karafka::Params:ParamsBatch] current params batch
+    attr_accessor :params_batch
+
+    # Assigns a topic to a consumer and builds up proper consumer functionalities
+    #   so that it can cooperate with the topic settings
+    # @param topic [Karafka::Routing::Topic]
+    def initialize(topic)
+      @topic = topic
+      Consumers::Includer.call(self)
     end
 
     # Executes the default consumer flow.
@@ -48,9 +40,6 @@ module Karafka
 
     private
 
-    # We make it private as it should be accessible only from the inside of a consumer
-    attr_reader :params_batch
-
     # @return [Karafka::Connection::Client] messages consuming client that can be used to
     #   commit manually offset or pause / stop consumer based on the business logic
     def client
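Consumers are now instantiated per topic and `params_batch` is a plain accessor, so an instance carries everything it needs; the delegated client methods stay private and are meant to be called from inside `consume`. A sketch under those assumptions (`UsersEventsConsumer`, its topic, and the `User` model are made up; manual offset management is assumed):

    class UsersEventsConsumer < ApplicationConsumer
      def consume
        params_batch.each do |params|
          # payload is deserialized lazily, on first access
          User.create!(params.payload)
        end

        # delegated (privately) to the underlying connection client
        mark_as_consumed!(params_batch.to_a.last)
      end
    end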

data/lib/karafka/base_responder.rb
@@ -39,7 +39,7 @@ module Karafka
   #
   # @example Multiple times used topic
   #   class Responder < BaseResponder
-  #     topic :required_topic, multiple_usage: true
+  #     topic :required_topic
   #
   #     def respond(data)
   #       data.each do |subset|
@@ -48,6 +48,17 @@ module Karafka
   #       end
   #     end
   #
+  # @example Specify serializer for a topic
+  #   class Responder < BaseResponder
+  #     topic :xml_topic, serializer: MyXMLSerializer
+  #
+  #     def respond(data)
+  #       data.each do |subset|
+  #         respond_to :xml_topic, subset
+  #       end
+  #     end
+  #   end
+  #
   # @example Accept multiple arguments to a respond method
   #   class Responder < BaseResponder
   #     topic :users_actions
@@ -59,31 +70,35 @@ module Karafka
   #     end
   #   end
   class BaseResponder
-    # Definitions of all topics that we want to be able to use in this responder should go here
-    class_attribute :topics
-
-    # Schema that we can use to control and/or require some additional details upon options
-    # that are being passed to the producer. This can be in particular useful if we want to make
-    # sure that for example partition_key is always present.
-    class_attribute :options_schema
+    # Responder usage contract
+    CONTRACT = Karafka::Contracts::ResponderUsage.new.freeze
 
-    attr_reader :messages_buffer
+    private_constant :CONTRACT
 
     class << self
+      # Definitions of all topics that we want to be able to use in this responder should go here
+      attr_accessor :topics
+      # Contract that we can use to control and/or require some additional details upon options
+      # that are being passed to the producer. This can be in particular useful if we want to make
+      # sure that for example partition_key is always present.
+      attr_accessor :options_contract
+
       # Registers a topic as on to which we will be able to respond
       # @param topic_name [Symbol, String] name of topic to which we want to respond
       # @param options [Hash] hash with optional configuration details
       def topic(topic_name, options = {})
+        options[:serializer] ||= Karafka::App.config.serializer
+        options[:registered] = true
         self.topics ||= {}
-        topic_obj = Responders::Topic.new(topic_name, options.merge(registered: true))
+        topic_obj = Responders::Topic.new(topic_name, options)
         self.topics[topic_obj.name] = topic_obj
       end
 
       # A simple alias for easier standalone responder usage.
-      # Instead of building it with new.call it allows (in case of usin JSON parser)
+      # Instead of building it with new.call it allows (in case of using JSON serializer)
      # to just run it directly from the class level
      # @param data Anything that we want to respond with
-      # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+      # @example Send user data with a responder
      #   UsersCreatedResponder.call(@created_user)
      def call(*data)
        # Just in case there were no topics defined for a responder, we initialize with
@@ -93,12 +108,11 @@ module Karafka
       end
     end
 
+    attr_reader :messages_buffer
+
     # Creates a responder object
-    # @param parser_class [Class] parser class that we can use to generate appropriate string
-    #   or nothing if we want to default to Karafka::Parsers::Json
     # @return [Karafka::BaseResponder] base responder descendant responder
-    def initialize(parser_class = Karafka::App.config.parser)
-      @parser_class = parser_class
+    def initialize
       @messages_buffer = {}
     end
@@ -107,7 +121,7 @@ module Karafka
     # @note We know that validators should be executed also before sending data to topics, however
     #   the implementation gets way more complicated then, that's why we check after everything
     #   was sent using responder
-    # @example Send user data with a responder (uses default Karafka::Parsers::Json parser)
+    # @example Send user data with a responder
     #   UsersCreatedResponder.new.call(@created_user)
     # @example Send user data with a responder using non default Parser
     #   UsersCreatedResponder.new(MyParser).call(@created_user)
@@ -134,25 +148,26 @@ module Karafka
         topic.to_h.merge!(usage_count: usage.count)
       end
 
-      result = Karafka::Schemas::ResponderUsage.call(
+      result = CONTRACT.call(
         registered_topics: registered_topics,
         used_topics: used_topics
       )
 
       return if result.success?
 
-      raise Karafka::Errors::InvalidResponderUsage, result.errors
+      raise Karafka::Errors::InvalidResponderUsageError, result.errors.to_h
     end
 
     # Checks if we met all the options requirements before sending them to the producer.
     def validate_options!
-      return true unless self.class.options_schema
+      return true unless self.class.options_contract
 
       messages_buffer.each_value do |messages_set|
         messages_set.each do |message_data|
-          result = self.class.options_schema.call(message_data.last)
+          result = self.class.options_contract.call(message_data.last)
           next if result.success?
-          raise Karafka::Errors::InvalidResponderMessageOptions, result.errors
+
+          raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors.to_h
         end
       end
     end
@@ -174,6 +189,7 @@ module Karafka
 
     # Method that needs to be implemented in a subclass. It should handle responding
     #   on registered topics
+    # @param _data [Object] anything that we want to use to send to Kafka
     # @raise [NotImplementedError] This method needs to be implemented in a subclass
     def respond(*_data)
       raise NotImplementedError, 'Implement this in a subclass'
@@ -183,7 +199,7 @@ module Karafka
     #   as many times as we need. Especially when we have 1:n flow
     # @param topic [Symbol, String] topic to which we want to respond
     # @param data [String, Object] string or object that we want to send
-    # @param options [Hash] options for waterdrop (e.g. partition_key)
+    # @param options [Hash] options for waterdrop (e.g. partition_key).
     # @note Respond to does not accept multiple data arguments.
     def respond_to(topic, data, options = {})
       # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
@@ -192,7 +208,7 @@ module Karafka
 
       messages_buffer[topic] ||= []
       messages_buffer[topic] << [
-        @parser_class.generate(data),
+        self.class.topics[topic].serializer.call(data),
         options.merge(topic: topic)
       ]
     end
@@ -200,9 +216,11 @@ module Karafka
     # @param options [Hash] options for waterdrop
     # @return [Class] WaterDrop producer (sync or async based on the settings)
     def producer(options)
-      self.class.topics[
-        options[:topic]
-      ].async? ? WaterDrop::AsyncProducer : WaterDrop::SyncProducer
+      if self.class.topics[options[:topic]].async?
+        WaterDrop::AsyncProducer
+      else
+        WaterDrop::SyncProducer
+      end
     end
   end
 end
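As the `respond_to` change shows, a topic's serializer only needs to respond to `call`; `topic` defaults it to `Karafka::App.config.serializer`. A sketch of a responder leaning on that (the topic name and lambda serializer are illustrative; `Hash#to_json` assumes the json gem is loaded):

    class EventsResponder < ApplicationResponder
      # any callable can serve as the serializer
      topic :events, serializer: ->(data) { data.to_json }

      def respond(event)
        respond_to :events, event, partition_key: event[:id].to_s
      end
    end

    EventsResponder.call(id: 1, type: 'signup')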

data/lib/karafka/cli.rb
@@ -37,7 +37,7 @@ end
 # This is kinda trick - since we don't have a autoload and other magic stuff
 # like Rails does, so instead this method allows us to replace currently running
 # console with a new one via Kernel.exec. It will start console with new code loaded
-# Yes we know that it is not turbofast, however it is turbo convinient and small
+# Yes, we know that it is not turbo fast, however it is turbo convenient and small
 #
 # Also - the KARAFKA_CONSOLE is used to detect that we're executing the irb session
 # so this method is only available when the Karafka console is running
@@ -47,7 +47,7 @@ end
 if ENV['KARAFKA_CONSOLE']
   # Reloads Karafka irb console session
   def reload!
-    puts "Reloading...\n"
+    Karafka.logger.info "Reloading...\n"
     Kernel.exec Karafka::Cli::Console.command
   end
 end

data/lib/karafka/cli/console.rb
@@ -8,15 +8,17 @@ module Karafka
       desc 'Start the Karafka console (short-cut alias: "c")'
       option aliases: 'c'
 
-      # @return [String] Console executing command
-      # @example
-      #   Karafka::Cli::Console.command #=> 'KARAFKA_CONSOLE=true bundle exec irb...'
-      def self.command
-        envs = [
-          "IRBRC='#{Karafka.gem_root}/.console_irbrc'",
-          'KARAFKA_CONSOLE=true'
-        ]
-        "#{envs.join(' ')} bundle exec irb"
+      class << self
+        # @return [String] Console executing command
+        # @example
+        #   Karafka::Cli::Console.command #=> 'KARAFKA_CONSOLE=true bundle exec irb...'
+        def command
+          envs = [
+            "IRBRC='#{Karafka.gem_root}/.console_irbrc'",
+            'KARAFKA_CONSOLE=true'
+          ]
+          "#{envs.join(' ')} bundle exec irb -r #{Karafka.boot_file}"
+        end
       end
 
       # Start the Karafka console
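With the boot file now passed via `-r`, the console starts with the application already loaded instead of a bare irb session. The rendered command looks roughly like this (paths are illustrative):

    Karafka::Cli::Console.command
    #=> "IRBRC='/gems/karafka-1.4.0/.console_irbrc' KARAFKA_CONSOLE=true bundle exec irb -r /app/karafka.rb"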

data/lib/karafka/cli/flow.rb
@@ -11,20 +11,22 @@ module Karafka
       def call
         topics.each do |topic|
           any_topics = !topic.responder&.topics.nil?
+          log_messages = []
 
           if any_topics
-            puts "#{topic.name} =>"
+            log_messages << "#{topic.name} =>"
 
             topic.responder.topics.each_value do |responder_topic|
               features = []
               features << (responder_topic.required? ? 'always' : 'conditionally')
-              features << (responder_topic.multiple_usage? ? 'one or more' : 'exactly once')
 
-              print responder_topic.name, "(#{features.join(', ')})"
+              log_messages << format(responder_topic.name, "(#{features.join(', ')})")
             end
           else
-            puts "#{topic.name} => (nothing)"
+            log_messages << "#{topic.name} => (nothing)"
           end
+
+          Karafka.logger.info(log_messages.join("\n"))
         end
       end
 
@@ -35,11 +37,11 @@ module Karafka
         Karafka::App.consumer_groups.map(&:topics).flatten.sort_by(&:name)
       end
 
-      # Prints a given value with label in a nice way
+      # Formats a given value with label in a nice way
       # @param label [String] label describing value
       # @param value [String] value that should be printed
-      def print(label, value)
-        printf "%-25s %s\n", " - #{label}:", value
+      def format(label, value)
+        " - #{label}: #{value}"
       end
     end
   end

data/lib/karafka/cli/info.rb
@@ -12,7 +12,9 @@ module Karafka
       config = Karafka::App.config
 
       info = [
-        "Karafka framework version: #{Karafka::VERSION}",
+        "Karafka version: #{Karafka::VERSION}",
+        "Ruby version: #{RUBY_VERSION}",
+        "Ruby-kafka version: #{::Kafka::VERSION}",
         "Application client id: #{config.client_id}",
         "Backend: #{config.backend}",
         "Batch fetching: #{config.batch_fetching}",
@@ -22,7 +24,7 @@ module Karafka
         "Kafka seed brokers: #{config.kafka.seed_brokers}"
       ]
 
-      puts(info.join("\n"))
+      Karafka.logger.info(info.join("\n"))
     end
   end
 end

data/lib/karafka/cli/install.rb
@@ -1,5 +1,7 @@
 # frozen_string_literal: true
 
+require 'erb'
+
 module Karafka
   # Karafka framework Cli
   class Cli < Thor
@@ -11,18 +13,30 @@ module Karafka
       INSTALL_DIRS = %w[
        app/consumers
        app/responders
+        app/workers
        config
+        lib
        log
        tmp/pids
      ].freeze
 
      # Where should we map proper files from templates
      INSTALL_FILES_MAP = {
-        'karafka.rb.example' => Karafka.boot_file.basename,
-        'application_consumer.rb.example' => 'app/consumers/application_consumer.rb',
-        'application_responder.rb.example' => 'app/responders/application_responder.rb'
+        'karafka.rb.erb' => Karafka.boot_file.basename,
+        'application_consumer.rb.erb' => 'app/consumers/application_consumer.rb',
+        'application_responder.rb.erb' => 'app/responders/application_responder.rb'
      }.freeze
 
+      # @param args [Array] all the things that Thor CLI accepts
+      def initialize(*args)
+        super
+        @rails = Bundler::LockfileParser.new(
+          Bundler.read_file(
+            Bundler.default_lockfile
+          )
+        ).dependencies.key?('rails')
+      end
+
      # Install all required things for Karafka application in current directory
      def call
        INSTALL_DIRS.each do |dir|
@@ -31,12 +45,22 @@
 
         INSTALL_FILES_MAP.each do |source, target|
           target = Karafka.root.join(target)
-          next if File.exist?(target)
 
-          source = Karafka.core_root.join("templates/#{source}")
-          FileUtils.cp_r(source, target)
+          template = File.read(Karafka.core_root.join("templates/#{source}"))
+          # @todo Replace with the keyword argument version once we don't have to support
+          #   Ruby < 2.6
+          render = ::ERB.new(template, nil, '-').result(binding)
+
+          File.open(target, 'w') { |file| file.write(render) }
         end
       end
+
+      # @return [Boolean] true if we have Rails loaded
+      # This allows us to generate customized karafka.rb template with some tweaks specific for
+      # Rails
+      def rails?
+        @rails
+      end
     end
   end
 end
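The installer now renders templates through ERB with the '-' trim mode, which is what lets Rails-only sections of `karafka.rb.erb` collapse cleanly when `rails?` is false. A self-contained sketch of that rendering call (the template text here is made up):

    require 'erb'

    rails = false
    template = <<~TXT
      # frozen_string_literal: true
      <%- if rails -%>
      require ::File.expand_path('../config/environment', __dir__)
      <%- end -%>
      require 'karafka'
    TXT

    # positional trim-mode argument, as in the diff above; on Ruby >= 2.6 the
    # equivalent is ERB.new(template, trim_mode: '-')
    puts ERB.new(template, nil, '-').result(binding)
    #=> # frozen_string_literal: true
    #=> require 'karafka'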