karafka 2.4.8 → 2.4.10

Files changed (88)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +0 -1
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +17 -0
  6. data/Gemfile +8 -5
  7. data/Gemfile.lock +24 -15
  8. data/bin/integrations +5 -0
  9. data/certs/cert.pem +26 -0
  10. data/config/locales/errors.yml +5 -0
  11. data/config/locales/pro_errors.yml +34 -0
  12. data/karafka.gemspec +1 -1
  13. data/lib/karafka/admin.rb +42 -0
  14. data/lib/karafka/base_consumer.rb +23 -0
  15. data/lib/karafka/contracts/config.rb +2 -0
  16. data/lib/karafka/contracts/consumer_group.rb +17 -0
  17. data/lib/karafka/errors.rb +3 -2
  18. data/lib/karafka/instrumentation/logger_listener.rb +3 -0
  19. data/lib/karafka/instrumentation/notifications.rb +3 -0
  20. data/lib/karafka/instrumentation/vendors/appsignal/client.rb +32 -11
  21. data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +1 -1
  22. data/lib/karafka/messages/message.rb +6 -0
  23. data/lib/karafka/pro/loader.rb +3 -1
  24. data/lib/karafka/pro/processing/strategies/dlq/default.rb +16 -1
  25. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +5 -1
  26. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +17 -1
  27. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +17 -1
  28. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +22 -6
  29. data/lib/karafka/pro/recurring_tasks/consumer.rb +105 -0
  30. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +53 -0
  31. data/lib/karafka/pro/recurring_tasks/contracts/task.rb +41 -0
  32. data/lib/karafka/pro/recurring_tasks/deserializer.rb +35 -0
  33. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +87 -0
  34. data/lib/karafka/pro/recurring_tasks/errors.rb +34 -0
  35. data/lib/karafka/pro/recurring_tasks/executor.rb +152 -0
  36. data/lib/karafka/pro/recurring_tasks/listener.rb +38 -0
  37. data/lib/karafka/pro/recurring_tasks/matcher.rb +38 -0
  38. data/lib/karafka/pro/recurring_tasks/schedule.rb +63 -0
  39. data/lib/karafka/pro/recurring_tasks/serializer.rb +113 -0
  40. data/lib/karafka/pro/recurring_tasks/setup/config.rb +52 -0
  41. data/lib/karafka/pro/recurring_tasks/task.rb +151 -0
  42. data/lib/karafka/pro/recurring_tasks.rb +87 -0
  43. data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +131 -0
  44. data/lib/karafka/pro/routing/features/recurring_tasks/config.rb +28 -0
  45. data/lib/karafka/pro/routing/features/recurring_tasks/contracts/topic.rb +40 -0
  46. data/lib/karafka/pro/routing/features/recurring_tasks/proxy.rb +27 -0
  47. data/lib/karafka/pro/routing/features/recurring_tasks/topic.rb +44 -0
  48. data/lib/karafka/pro/routing/features/recurring_tasks.rb +25 -0
  49. data/lib/karafka/pro/routing/features/scheduled_messages/builder.rb +131 -0
  50. data/lib/karafka/pro/routing/features/scheduled_messages/config.rb +28 -0
  51. data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +40 -0
  52. data/lib/karafka/pro/routing/features/scheduled_messages/proxy.rb +27 -0
  53. data/lib/karafka/pro/routing/features/scheduled_messages/topic.rb +44 -0
  54. data/lib/karafka/pro/routing/features/scheduled_messages.rb +24 -0
  55. data/lib/karafka/pro/scheduled_messages/consumer.rb +185 -0
  56. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +56 -0
  57. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +61 -0
  58. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +79 -0
  59. data/lib/karafka/pro/scheduled_messages/day.rb +45 -0
  60. data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +46 -0
  61. data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +35 -0
  62. data/lib/karafka/pro/scheduled_messages/dispatcher.rb +122 -0
  63. data/lib/karafka/pro/scheduled_messages/errors.rb +28 -0
  64. data/lib/karafka/pro/scheduled_messages/max_epoch.rb +41 -0
  65. data/lib/karafka/pro/scheduled_messages/proxy.rb +176 -0
  66. data/lib/karafka/pro/scheduled_messages/schema_validator.rb +37 -0
  67. data/lib/karafka/pro/scheduled_messages/serializer.rb +55 -0
  68. data/lib/karafka/pro/scheduled_messages/setup/config.rb +60 -0
  69. data/lib/karafka/pro/scheduled_messages/state.rb +62 -0
  70. data/lib/karafka/pro/scheduled_messages/tracker.rb +64 -0
  71. data/lib/karafka/pro/scheduled_messages.rb +67 -0
  72. data/lib/karafka/processing/executor.rb +6 -0
  73. data/lib/karafka/processing/strategies/default.rb +10 -0
  74. data/lib/karafka/processing/strategies/dlq.rb +16 -2
  75. data/lib/karafka/processing/strategies/dlq_mom.rb +25 -6
  76. data/lib/karafka/processing/worker.rb +11 -1
  77. data/lib/karafka/railtie.rb +11 -42
  78. data/lib/karafka/routing/features/dead_letter_queue/config.rb +3 -0
  79. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +1 -0
  80. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +7 -2
  81. data/lib/karafka/routing/features/eofed/contracts/topic.rb +12 -0
  82. data/lib/karafka/routing/topic.rb +14 -0
  83. data/lib/karafka/setup/config.rb +3 -0
  84. data/lib/karafka/version.rb +1 -1
  85. data.tar.gz.sig +0 -0
  86. metadata +68 -25
  87. metadata.gz.sig +0 -0
  88. data/certs/cert_chain.pem +0 -26
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c7e8e51a5c0c4ded0d074965cee2090e64ca77e43443f4f36ab03cc3a21ddfd6
-  data.tar.gz: c6e912c518d301f55974a9e4deb491ebe4c3e073e6748fe62ce1003eaea7bed7
+  metadata.gz: 7601c2daf3eaacae67697fe28dd403ba8bd41387df90da49691912dad7ba0963
+  data.tar.gz: 23d6763195d2e6bf17c573859d133637c21f849e3def1eb0f852d5cb4554ae17
 SHA512:
-  metadata.gz: 8769a192c7ebb852250afd96611e115807172bf320d4e9d513bffbcc66f5570ca637aee1b8f3b68e46350b37054935ea905893139bc264a02949f975b39f2041
-  data.tar.gz: 13dc8e118850aace7127bae5fe667b6f73118d220b61b1d2b590cc18e71243b3c1467d43df9ae134cd7b26c0274a100e34d8a07db0c6091bc02fd9c95ffb24f0
+  metadata.gz: 56202acc444f3b69af7a8b643b9e28f77ffbcadeab70858b0ffaa4b4a7a264082c636ff5c0abbaba0ac1cd6f2fb72fd6924bf6e87fe3f6e57549d5f228786e91
+  data.tar.gz: b39d96ef2bcd09079b044321058b4a741797673b815ff6508a8123a7a513f08ca364d05aa22dc994a25fe8c64d959bcd19342301156e97bf0b7d005d7abcb7db
checksums.yaml.gz.sig CHANGED
Binary file
data/.github/workflows/ci.yml CHANGED
@@ -77,7 +77,6 @@ jobs:
           - '3.3'
           - '3.2'
           - '3.1'
-          - '3.0'
         include:
           - ruby: '3.3'
             coverage: 'true'
data/.ruby-version CHANGED
@@ -1 +1 @@
-3.3.4
+3.3.5
data/CHANGELOG.md CHANGED
@@ -1,5 +1,22 @@
 # Karafka Framework Changelog
 
+## 2.4.10 (2024-09-03)
+- **[Feature]** Provide Kafka based Scheduled Messages to be able to send messages in the future via a proxy topic.
+- [Enhancement] Introduce a `#assigned` hook for consumers to be able to trigger actions when consumer is built and assigned but before first consume/ticking, etc.
+- [Enhancement] Provide `Karafka::Messages::Message#tombstone?` to be able to quickly check if a message is a tombstone message.
+- [Enhancement] Provide more flexible API for Recurring Tasks topics reconfiguration.
+- [Enhancement] Remove no longer needed Rails connection releaser.
+- [Enhancement] Update AppSignal client to support newer versions (tombruijn and hieuk09).
+- [Fix] Fix a case where there would be a way to define multiple subscription groups for same topic with different consumer.
+
+## 2.4.9 (2024-08-23)
+- **[Feature]** Provide Kafka based Recurring (Cron) Tasks.
+- [Enhancement] Wrap worker work with Rails Reloader/Executor (fusion2004)
+- [Enhancement] Allow for partial topic level kafka scope settings reconfiguration via `inherit` flag.
+- [Enhancement] Validate `eof` kafka scope flag when `eofed` in routing enabled.
+- [Enhancement] Provide `mark_after_dispatch` setting for granular DLQ marking control.
+- [Enhancement] Provide `Karafka::Admin.rename_consumer_group`.
+
 ## 2.4.8 (2024-08-09)
 - **[Feature]** Introduce ability to react to `#eof` either from `#consume` or from `#eofed` when EOF without new messages.
 - [Enhancement] Provide `Consumer#eofed?` to indicate reaching EOF.
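The headline 2.4.10 feature above is the proxy-topic based Scheduled Messages. A rough usage sketch based on the Pro documentation follows; the topic names and payload are illustrative and the exact keyword arguments should be verified against the docs:

    # Wraps the target message in an envelope understood by the scheduler topic
    proxied = Karafka::Pro::ScheduledMessages.schedule(
      message: { topic: 'events', payload: { id: 1 }.to_json },
      epoch: Time.now.to_i + 3600, # deliver roughly one hour from now
      envelope: { topic: 'scheduled_messages' }
    )

    Karafka.producer.produce_async(proxied)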
data/Gemfile CHANGED
@@ -6,20 +6,23 @@ plugin 'diffend'
 
 gemspec
 
-# Karafka gem does not require activejob nor karafka-web to work
+# Karafka gem does not require activejob, karafka-web or fugit to work
 # They are added here because they are part of the integration suite
 # Since some of those are only needed for some specs, they should never be required automatically
+group :integrations, :test do
+  gem 'fugit', require: false
+  gem 'rspec', require: false
+end
+
 group :integrations do
   gem 'activejob', require: false
-  gem 'karafka-testing', '>= 2.4.0', require: false
-  gem 'karafka-web', '>= 0.10.0.beta1', require: false
-  gem 'rspec', require: false
+  gem 'karafka-testing', '>= 2.4.6', require: false
+  gem 'karafka-web', '>= 0.10.0.rc2', require: false
 end
 
 group :test do
   gem 'byebug'
   gem 'factory_bot'
   gem 'ostruct'
-  gem 'rspec'
   gem 'simplecov'
 end
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    karafka (2.4.8)
+    karafka (2.4.10)
       base64 (~> 0.2)
       karafka-core (>= 2.4.3, < 2.5.0)
       karafka-rdkafka (>= 0.17.2)
@@ -11,31 +11,37 @@ PATH
 GEM
   remote: https://rubygems.org/
   specs:
-    activejob (7.1.3.4)
-      activesupport (= 7.1.3.4)
+    activejob (7.2.1)
+      activesupport (= 7.2.1)
       globalid (>= 0.3.6)
-    activesupport (7.1.3.4)
+    activesupport (7.2.1)
       base64
       bigdecimal
-      concurrent-ruby (~> 1.0, >= 1.0.2)
+      concurrent-ruby (~> 1.0, >= 1.3.1)
       connection_pool (>= 2.2.5)
       drb
       i18n (>= 1.6, < 2)
+      logger (>= 1.4.2)
       minitest (>= 5.1)
-      mutex_m
-      tzinfo (~> 2.0)
+      securerandom (>= 0.3)
+      tzinfo (~> 2.0, >= 2.0.5)
     base64 (0.2.0)
     bigdecimal (3.1.8)
     byebug (11.1.3)
-    concurrent-ruby (1.3.3)
+    concurrent-ruby (1.3.4)
     connection_pool (2.4.1)
     diff-lcs (1.5.1)
     docile (1.4.1)
     drb (2.2.1)
     erubi (1.13.0)
+    et-orbi (1.2.11)
+      tzinfo
     factory_bot (6.4.6)
       activesupport (>= 5.0.0)
     ffi (1.17.0)
+    fugit (1.11.1)
+      et-orbi (~> 1, >= 1.2.11)
+      raabro (~> 1.4)
     globalid (1.2.1)
       activesupport (>= 6.1)
     i18n (1.14.5)
@@ -49,19 +55,20 @@ GEM
     karafka-testing (2.4.6)
       karafka (>= 2.4.0, < 2.5.0)
       waterdrop (>= 2.7.0)
-    karafka-web (0.10.0.rc1)
+    karafka-web (0.10.1)
       erubi (~> 1.4)
-      karafka (>= 2.4.7, < 2.5.0)
+      karafka (>= 2.4.9, < 2.5.0)
       karafka-core (>= 2.4.0, < 2.5.0)
       roda (~> 3.68, >= 3.69)
       tilt (~> 2.0)
+    logger (1.6.0)
     mini_portile2 (2.8.7)
-    minitest (5.24.1)
-    mutex_m (0.2.0)
+    minitest (5.25.1)
     ostruct (0.6.0)
+    raabro (1.4.0)
     rack (3.1.7)
     rake (13.2.1)
-    roda (3.82.0)
+    roda (3.83.0)
       rack
     rspec (3.13.0)
       rspec-core (~> 3.13.0)
@@ -76,6 +83,7 @@ GEM
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
     rspec-support (3.13.1)
+    securerandom (0.3.1)
     simplecov (0.22.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
@@ -99,9 +107,10 @@ DEPENDENCIES
   activejob
   byebug
   factory_bot
+  fugit
   karafka!
-  karafka-testing (>= 2.4.0)
-  karafka-web (>= 0.10.0.beta1)
+  karafka-testing (>= 2.4.6)
+  karafka-web (>= 0.10.0.rc2)
   ostruct
   rspec
   simplecov
data/bin/integrations CHANGED
@@ -240,6 +240,11 @@ ARGV.each do |filter|
   end
 end
 
+# Remove Rails 7.2 specs from Ruby 3.0 because it requires 3.1
+specs.delete_if do |spec|
+  RUBY_VERSION < '3.1' && spec.include?('rails72')
+end
+
 raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if specs.empty?
 
 # Randomize order
data/certs/cert.pem ADDED
@@ -0,0 +1,26 @@
+-----BEGIN CERTIFICATE-----
+MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
+YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
+MB4XDTI0MDgyMzEwMTkyMFoXDTQ5MDgxNzEwMTkyMFowPzEQMA4GA1UEAwwHY29u
+dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
+bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKjLhLjQqUlNayxkXnO+
+PsmCDs/KFIzhrsYMfLZRZNaWmzV3ujljMOdDjd4snM2X06C41iVdQPWjpe3j8vVe
+ZXEWR/twSbOP6Eeg8WVH2wCOo0x5i7yhVn4UBLH4JpfEMCbemVcWQ9ry9OMg4WpH
+Uu4dRwxFV7hzCz3p0QfNLRI4miAxnGWcnlD98IJRjBAksTuR1Llj0vbOrDGsL9ZT
+JeXP2gdRLd8SqzAFJEWrbeTBCBU7gfSh3oMg5SVDLjaqf7Kz5wC/8bDZydzanOxB
+T6CDXPsCnllmvTNx2ei2T5rGYJOzJeNTmJLLK6hJWUlAvaQSvCwZRvFJ0tVGLEoS
+flqSr6uGyyl1eMUsNmsH4BqPEYcAV6P2PKTv2vUR8AP0raDvZ3xL1TKvfRb8xRpo
+vPopCGlY5XBWEc6QERHfVLTIVsjnls2/Ujj4h8/TSfqqYnaHKefIMLbuD/tquMjD
+iWQsW2qStBV0T+U7FijKxVfrfqZP7GxQmDAc9o1iiyAa3QIDAQABo3cwdTAJBgNV
+HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQU3O4dTXmvE7YpAkszGzR9DdL9
+sbEwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
+bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAVKTfoLXn7mqdSxIR
+eqxcR6Huudg1jes81s1+X0uiRTR3hxxKZ3Y82cPsee9zYWyBrN8TA4KA0WILTru7
+Ygxvzha0SRPsSiaKLmgOJ+61ebI4+bOORzIJLpD6GxCxu1r7MI4+0r1u1xe0EWi8
+agkVo1k4Vi8cKMLm6Gl9b3wG9zQBw6fcgKwmpjKiNnOLP+OytzUANrIUJjoq6oal
+TC+f/Uc0TLaRqUaW/bejxzDWWHoM3SU6aoLPuerglzp9zZVzihXwx3jPLUVKDFpF
+Rl2lcBDxlpYGueGo0/oNzGJAAy6js8jhtHC9+19PD53vk7wHtFTZ/0ugDQYnwQ+x
+oml2fAAuVWpTBCgOVFe6XCQpMKopzoxQ1PjKztW2KYxgJdIBX87SnL3aWuBQmhRd
+i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
+ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
+-----END CERTIFICATE-----
data/config/locales/errors.yml CHANGED
@@ -33,6 +33,8 @@ en:
     internal.processing.partitioner_class_format: cannot be nil
     internal.processing.strategy_selector_format: cannot be nil
     internal.processing.expansions_selector_format: cannot be nil
+    internal.processing.executor_class_format: cannot be nil
+    internal.processing.worker_job_call_wrapper_format: 'needs to be false or respond to #wrap'
 
     internal.active_job.dispatcher_format: cannot be nil
     internal.active_job.job_options_contract_format: cannot be nil
@@ -113,10 +115,12 @@ en:
     dead_letter_queue.transactional_format: needs to be either true or false
     dead_letter_queue.dispatch_method_format: 'needs to be either #produce_sync or #produce_async'
     dead_letter_queue.marking_method_format: 'needs to be either #mark_as_consumed or #mark_as_consumed!'
+    dead_letter_queue.mark_after_dispatch_format: 'needs to be true, false or nil'
 
     active_format: needs to be either true or false
 
     eofed.active_format: needs to be either true or false
+    eofed.kafka_enable: 'cannot be enabled without enable.partition.eof set to true'
 
     declaratives.partitions_format: needs to be more or equal to 1
     declaratives.active_format: needs to be true
@@ -136,6 +140,7 @@ en:
   consumer_group:
     missing: needs to be present
     topics_names_not_unique: all topic names within a single consumer group must be unique
+    topics_many_consumers_same_topic: 'topic within a single consumer group cannot have distinct consumers'
     id_format: 'needs to be a string with a Kafka accepted format'
     topics_format: needs to be a non-empty array
     topics_namespaced_names_not_unique: |
data/config/locales/pro_errors.yml CHANGED
@@ -63,6 +63,10 @@ en:
     swarm.nodes_format: needs to be a range, array of nodes ids or a hash with direct assignments
     swarm_nodes_with_non_existent_nodes: includes unreachable nodes ids
 
+    recurring_tasks.active_format: 'needs to be boolean'
+    scheduled_messages.active_format: 'needs to be boolean'
+    scheduled_messages.active_missing: 'needs to be boolean'
+
     direct_assignments.active_missing: needs to be present
     direct_assignments.active_format: 'needs to be boolean'
     direct_assignments.partitions_missing: 'needs to be present'
@@ -99,5 +103,35 @@ en:
     patterns.ttl_format: needs to be an integer bigger than 0
     patterns.ttl_missing: needs to be present
 
+    recurring_tasks.consumer_class_format: 'needs to inherit from Karafka::BaseConsumer'
+    recurring_tasks.group_id_format: 'needs to be a string with a Kafka accepted format'
+    recurring_tasks.topics.schedules_format: 'needs to be a string with a Kafka accepted format'
+    recurring_tasks.topics.logs_format: 'needs to be a string with a Kafka accepted format'
+    recurring_tasks.interval_format: 'needs to be equal or more than 1000 and an integer'
+    recurring_tasks.deserializer_format: 'needs to be configured'
+    recurring_tasks.logging_format: needs to be a boolean
+
+    scheduled_messages.consumer_class_format: 'must be a class'
+    scheduled_messages.dispatcher_class_format: 'must be a class'
+    scheduled_messages.flush_batch_size_format: needs to be an integer bigger than 0
+    scheduled_messages.interval_format: needs to be an integer bigger or equal to 1000
+    scheduled_messages.deserializers.headers_format: cannot be nil
+    scheduled_messages.deserializers.payload_format: cannot be nil
+    scheduled_messages.group_id_format: 'needs to be a string with a Kafka accepted format'
+    scheduled_messages.states_postfix_format: 'needs to be a string with a Kafka accepted format'
+
   routing:
     swarm_nodes_not_used: 'At least one of the nodes has no assignments'
+
+  recurring_tasks:
+    id_format: 'can include only alphanumeric characters (a-z, A-Z, 0-9), hyphens (-), and underscores (_)'
+    cron_format: must be a non-empty string
+    enabled_format: needs to be a boolean
+    changed_format: needs to be a boolean
+    previous_time_format: needs to be a numerical or time
+
+  scheduled_messages_message:
+    key_missing: must be present and should be unique within the partition
+    key_format: needs to be a non-empty string unique within the partition
+    headers_schedule_target_epoch_in_the_past: 'scheduling cannot happen in the past'
+    headers_format: are not correct
data/karafka.gemspec CHANGED
@@ -33,7 +33,7 @@ Gem::Specification.new do |spec|
     spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
   end
 
-  spec.cert_chain = %w[certs/cert_chain.pem]
+  spec.cert_chain = %w[certs/cert.pem]
   spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
   spec.executables = %w[karafka]
   spec.require_paths = %w[lib]
data/lib/karafka/admin.rb CHANGED
@@ -274,6 +274,48 @@ module Karafka
       end
     end
 
+    # Takes consumer group and its topics and migrates all the offsets to a new named group
+    #
+    # @param previous_name [String] old consumer group name
+    # @param new_name [String] new consumer group name
+    # @param topics [Array<String>] topics for which we want to migrate offsets during rename
+    # @param delete_previous [Boolean] should we delete previous consumer group after rename.
+    #   Defaults to true.
+    #
+    # @note This method should **not** be executed on a running consumer group as it creates a
+    #   "fake" consumer and uses it to move offsets.
+    #
+    # @note After migration unless `delete_previous` is set to `false`, old group will be
+    #   removed.
+    #
+    # @note If new consumer group exists, old offsets will be added to it.
+    def rename_consumer_group(previous_name, new_name, topics, delete_previous: true)
+      remap = Hash.new { |h, k| h[k] = {} }
+
+      old_lags = read_lags_with_offsets({ previous_name => topics })
+
+      return if old_lags.empty?
+
+      read_lags_with_offsets({ previous_name => topics })
+        .fetch(previous_name)
+        .each do |topic, partitions|
+          partitions.each do |partition_id, details|
+            offset = details[:offset]
+
+            # No offset on this partition
+            next if offset.negative?
+
+            remap[topic][partition_id] = offset
+          end
+        end
+
+      seek_consumer_group(new_name, remap)
+
+      return unless delete_previous
+
+      delete_consumer_group(previous_name)
+    end
+
     # Removes given consumer group (if exists)
     #
     # @param consumer_group_id [String] consumer group name
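A minimal usage sketch for the new admin API (group and topic names are illustrative; per the notes above, the group must not be running while this executes):

    # Moves committed offsets for the listed topics to the new group name and,
    # by default, removes the old consumer group afterwards
    Karafka::Admin.rename_consumer_group(
      'orders-group-old',
      'orders-group-new',
      %w[orders payments]
    )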
data/lib/karafka/base_consumer.rb CHANGED
@@ -31,6 +31,20 @@ module Karafka
       @used = false
     end
 
+    # Trigger method running after consumer is fully initialized.
+    #
+    # @private
+    def on_initialized
+      handle_initialized
+    rescue StandardError => e
+      Karafka.monitor.instrument(
+        'error.occurred',
+        error: e,
+        caller: self,
+        type: 'consumer.initialized.error'
+      )
+    end
+
     # Can be used to run preparation code prior to the job being enqueued
     #
     # @private
@@ -176,6 +190,15 @@ module Karafka
 
     private
 
+    # Method called post-initialization of a consumer when all basic things are assigned.
+    # Since initialization via `#initialize` is complex and some states are set a bit later, this
+    # hook allows to initialize resources once at a time when topic, partition and other things
+    # are assigned to the consumer
+    #
+    # @note Please keep in mind that it will run many times when persistence is off. Basically once
+    #   each batch.
+    def initialized; end
+
     # Method that will perform business logic and on data received from Kafka (it will consume
     #   the data)
     # @note This method needs to be implemented in a subclass. We stub it here as a failover if
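A short sketch of how the private hook can be used in an application consumer (the class name and the `@buffer` resource are illustrative):

    class EventsConsumer < Karafka::BaseConsumer
      def consume
        messages.each { |message| @buffer << message.payload }
      end

      private

      # Runs once topic, partition and other assignments are in place,
      # before the first #consume (and once per batch when persistence is off)
      def initialized
        @buffer = []
      end
    end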
data/lib/karafka/contracts/config.rb CHANGED
@@ -116,6 +116,8 @@ module Karafka
       required(:partitioner_class) { |val| !val.nil? }
       required(:strategy_selector) { |val| !val.nil? }
       required(:expansions_selector) { |val| !val.nil? }
+      required(:executor_class) { |val| !val.nil? }
+      required(:worker_job_call_wrapper) { |val| val == false || val.respond_to?(:wrap) }
     end
 
     nested(:active_job) do
data/lib/karafka/contracts/consumer_group.rb CHANGED
@@ -25,6 +25,23 @@ module Karafka
       [[%i[topics], :names_not_unique]]
     end
 
+    # Prevent same topics subscriptions in one CG with different consumer classes
+    # This should prevent users from accidentally creating multi-sg one CG setup with weird
+    # different consumer usage. If you need to consume same topic twice, use distinct CGs.
+    virtual do |data, errors|
+      next unless errors.empty?
+
+      topics_consumers = Hash.new { |h, k| h[k] = Set.new }
+
+      data.fetch(:topics).map do |topic|
+        topics_consumers[topic[:name]] << topic[:consumer]
+      end
+
+      next if topics_consumers.values.map(&:size).all? { |count| count == 1 }
+
+      [[%i[topics], :many_consumers_same_topic]]
+    end
+
     virtual do |data, errors|
       next unless errors.empty?
       next unless ::Karafka::App.config.strict_topics_namespacing
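For context, a hedged sketch of the multi-subscription-group setup this virtual validation now rejects (class and topic names are illustrative):

    class KarafkaApp < Karafka::App
      routes.draw do
        consumer_group :payments do
          subscription_group :fast do
            topic(:transactions) { consumer ConsumerA }
          end

          subscription_group :slow do
            # Same topic with a different consumer inside one consumer group
            # now fails validation; use a separate consumer group instead
            topic(:transactions) { consumer ConsumerB }
          end
        end
      end
    end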
data/lib/karafka/errors.rb CHANGED
@@ -82,10 +82,11 @@ module Karafka
     AssignmentLostError = Class.new(BaseError)
 
     # Raised if optional dependencies like karafka-web are required in a version that is not
-    # supported by the current framework version.
+    # supported by the current framework version or when an optional dependency is missing.
     #
     # Because we do not want to require web out of the box and we do not want to lock web with
-    # karafka 1:1, we do such a sanity check
+    # karafka 1:1, we do such a sanity check. This also applies to cases where some external
+    # optional dependencies are needed but not available.
     DependencyConstraintsError = Class.new(BaseError)
 
     # Raised when we were not able to open pidfd for given pid
data/lib/karafka/instrumentation/logger_listener.rb CHANGED
@@ -275,6 +275,9 @@ module Karafka
       details = (error.backtrace || []).join("\n")
 
       case type
+      when 'consumer.initialized.error'
+        error "Consumer initialized error: #{error}"
+        error details
       when 'consumer.consume.error'
         error "Consumer consuming error: #{error}"
         error details
data/lib/karafka/instrumentation/notifications.rb CHANGED
@@ -48,6 +48,9 @@ module Karafka
         connection.listener.stopping
         connection.listener.stopped
 
+        consumer.initialize
+        consumer.initialized
+
         consumer.before_schedule_consume
         consumer.consume
         consumer.consumed
data/lib/karafka/instrumentation/vendors/appsignal/client.rb CHANGED
@@ -23,11 +23,16 @@ module Karafka
         # @param action_name [String] action name. For processing this should be equal to
         #   consumer class + method name
         def start_transaction(action_name)
-          transaction = ::Appsignal::Transaction.create(
-            SecureRandom.uuid,
-            namespace_name,
-            ::Appsignal::Transaction::GenericRequest.new({})
-          )
+          transaction =
+            if version_4_or_newer?
+              ::Appsignal::Transaction.create(namespace_name)
+            else
+              ::Appsignal::Transaction.create(
+                SecureRandom.uuid,
+                namespace_name,
+                ::Appsignal::Transaction::GenericRequest.new({})
+              )
+            end
 
           transaction.set_action_if_nil(action_name)
         end
@@ -45,10 +50,10 @@ module Karafka
         def metadata=(metadata_hash)
           return unless transaction?
 
-          transaction = ::Appsignal::Transaction.current
+          current_transaction = transaction
 
           stringify_hash(metadata_hash).each do |key, value|
-            transaction.set_metadata(key, value)
+            current_transaction.set_metadata(key, value)
           end
         end
 
@@ -78,15 +83,20 @@ module Karafka
           )
         end
 
-        # Sends the error that occurred to Appsignal
+        # Report the error that occurred to Appsignal
         #
         # @param error [Object] error we want to ship to Appsignal
-        def send_error(error)
+        def report_error(error)
+          if ::Appsignal.respond_to?(:report_error)
+            # This helper will always report the error
+            ::Appsignal.report_error(error) do |transaction|
+              transaction.set_namespace(namespace_name)
+            end
           # If we have an active transaction we should use it instead of creating a generic one
          # That way proper namespace and other data may be transferred
          #
          # In case there is no transaction, a new generic background job one will be used
-          if transaction?
+          elsif transaction?
            transaction.set_error(error)
          else
            ::Appsignal.send_error(error) do |transaction|
@@ -99,7 +109,11 @@ module Karafka
         # @param name [Symbol] probe name
         # @param probe [Proc] code to run every minute
         def register_probe(name, probe)
-          ::Appsignal::Minutely.probes.register(name, probe)
+          if ::Appsignal::Probes.respond_to?(:register)
+            ::Appsignal::Probes.register(name, probe)
+          else
+            ::Appsignal::Minutely.probes.register(name, probe)
+          end
         end
 
         private
@@ -129,6 +143,13 @@ module Karafka
         def namespace_name
           @namespace_name ||= ::Appsignal::Transaction::BACKGROUND_JOB
         end
+
+        # @return [Boolean] is this v4+ version of Appsignal gem or older. Used for backwards
+        #   compatibility checks.
+        def version_4_or_newer?
+          @version_4_or_newer ||=
+            Gem::Version.new(Appsignal::VERSION) >= Gem::Version.new('4.0.0')
+        end
       end
     end
   end
data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb CHANGED
@@ -21,7 +21,7 @@ module Karafka
       #
       # @param event [Karafka::Core::Monitoring::Event]
       def on_error_occurred(event)
-        client.send_error(event[:error])
+        client.report_error(event[:error])
       end
     end
   end
data/lib/karafka/messages/message.rb CHANGED
@@ -51,6 +51,12 @@ module Karafka
       @deserialized
     end
 
+    # @return [Boolean] true if the message has a key and raw payload is nil, it is a tombstone
+    #   event. Otherwise it is not.
+    def tombstone?
+      !raw_key.nil? && @raw_payload.nil?
+    end
+
     private
 
     # @return [Object] deserialized data
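A quick sketch of the new predicate inside a consumer handling a compacted topic (the `cache` object is illustrative):

    def consume
      messages.each do |message|
        # A tombstone carries a key with a nil payload and, on compacted
        # topics, signals that the record for this key was removed
        if message.tombstone?
          cache.delete(message.key)
        else
          cache.store(message.key, message.payload)
        end
      end
    end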
data/lib/karafka/pro/loader.rb CHANGED
@@ -75,7 +75,9 @@ module Karafka
       def features
         [
           Encryption,
-          Cleaner
+          Cleaner,
+          RecurringTasks,
+          ScheduledMessages
         ]
       end
data/lib/karafka/pro/processing/strategies/dlq/default.rb CHANGED
@@ -135,7 +135,12 @@ module Karafka
 
         dispatch = lambda do
           dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
-          mark_dispatched_to_dlq(skippable_message)
+
+          if mark_after_dispatch?
+            mark_dispatched_to_dlq(skippable_message)
+          else
+            coordinator.seek_offset = skippable_message.offset + 1
+          end
         end
 
         if dispatch_in_a_transaction?
@@ -193,6 +198,16 @@ module Karafka
           producer.transactional? && topic.dead_letter_queue.transactional?
         end
 
+        # @return [Boolean] should we mark given message as consumed after dispatch.
+        #   For default non MOM strategies if user did not explicitly tell us not to, we mark
+        #   it. Default is `nil`, which means `true` in this case. If user provided alternative
+        #   value, we go with it.
+        def mark_after_dispatch?
+          return true if topic.dead_letter_queue.mark_after_dispatch.nil?
+
+          topic.dead_letter_queue.mark_after_dispatch
+        end
+
         # Runs the DLQ strategy and based on it it performs certain operations
         #
         # In case of `:skip` and `:dispatch` will run the exact flow provided in a block
data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb CHANGED
@@ -55,7 +55,11 @@ module Karafka
             skippable_message, _marked = find_skippable_message
             dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
 
-            coordinator.seek_offset = skippable_message.offset + 1
+            if mark_after_dispatch?
+              mark_dispatched_to_dlq(skippable_message)
+            else
+              coordinator.seek_offset = skippable_message.offset + 1
+            end
           end
         end
       end
data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb CHANGED
@@ -46,11 +46,27 @@ module Karafka
             skippable_message, _marked = find_skippable_message
             dispatch_to_dlq(skippable_message) if dispatch_to_dlq?
 
-            coordinator.seek_offset = skippable_message.offset + 1
+            if mark_after_dispatch?
+              mark_dispatched_to_dlq(skippable_message)
+            else
+              coordinator.seek_offset = skippable_message.offset + 1
+            end
           end
         end
       end
+
+      # @return [Boolean] should we mark given message as consumed after dispatch. For
+      #   MOM strategies if user did not explicitly tell us to mark, we do not mark. Default
+      #   is `nil`, which means `false` in this case. If user provided alternative value, we
+      #   go with it.
+      #
+      # @note Please note, this is the opposite behavior than in case of AOM strategies.
+      def mark_after_dispatch?
+        return false if topic.dead_letter_queue.mark_after_dispatch.nil?
+
+        topic.dead_letter_queue.mark_after_dispatch
+      end
     end
   end
 end
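For reference, a hedged routing sketch of how the new `mark_after_dispatch` flag is meant to be set (consumer and topic names are illustrative); with MOM strategies the implicit default is false, otherwise true:

    class KarafkaApp < Karafka::App
      routes.draw do
        topic :orders do
          consumer OrdersConsumer
          # Dispatch broken messages to the DLQ but leave offset marking
          # to the consumer instead of marking after dispatch
          dead_letter_queue(
            topic: 'orders_dlq',
            max_retries: 2,
            mark_after_dispatch: false
          )
        end
      end
    end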