karafka 2.5.1 → 2.5.2

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (151)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci_linux_ubuntu_x86_64_gnu.yml +3 -29
  3. data/.github/workflows/ci_macos_arm64.yml +1 -1
  4. data/.github/workflows/push.yml +2 -2
  5. data/.github/workflows/trigger-wiki-refresh.yml +1 -1
  6. data/.ruby-version +1 -1
  7. data/CHANGELOG.md +14 -4
  8. data/Gemfile +0 -2
  9. data/Gemfile.lock +30 -31
  10. data/bin/integrations +2 -1
  11. data/bin/rspecs +4 -0
  12. data/config/locales/errors.yml +6 -4
  13. data/config/locales/pro_errors.yml +5 -4
  14. data/docker-compose.yml +1 -1
  15. data/examples/payloads/json/sample_set_02/download.json +191 -0
  16. data/examples/payloads/json/sample_set_03/event_type_1.json +18 -0
  17. data/examples/payloads/json/sample_set_03/event_type_2.json +263 -0
  18. data/examples/payloads/json/sample_set_03/event_type_3.json +41 -0
  19. data/karafka.gemspec +1 -1
  20. data/lib/active_job/queue_adapters/karafka_adapter.rb +1 -1
  21. data/lib/karafka/active_job/consumer.rb +5 -1
  22. data/lib/karafka/active_job/current_attributes/job_wrapper.rb +45 -0
  23. data/lib/karafka/active_job/current_attributes/loading.rb +1 -1
  24. data/lib/karafka/active_job/current_attributes/persistence.rb +19 -7
  25. data/lib/karafka/active_job/current_attributes.rb +1 -0
  26. data/lib/karafka/active_job/deserializer.rb +61 -0
  27. data/lib/karafka/active_job/dispatcher.rb +32 -12
  28. data/lib/karafka/active_job/job_options_contract.rb +2 -4
  29. data/lib/karafka/admin/acl.rb +8 -4
  30. data/lib/karafka/admin/configs/config.rb +6 -4
  31. data/lib/karafka/admin/consumer_groups.rb +74 -4
  32. data/lib/karafka/admin/topics.rb +40 -7
  33. data/lib/karafka/admin.rb +13 -4
  34. data/lib/karafka/base_consumer.rb +5 -5
  35. data/lib/karafka/cli/base.rb +1 -1
  36. data/lib/karafka/cli/contracts/server.rb +2 -4
  37. data/lib/karafka/cli/install.rb +1 -1
  38. data/lib/karafka/cli/topics/align.rb +1 -1
  39. data/lib/karafka/cli/topics/repartition.rb +2 -2
  40. data/lib/karafka/connection/client.rb +12 -2
  41. data/lib/karafka/connection/listeners_batch.rb +2 -3
  42. data/lib/karafka/connection/proxy.rb +11 -7
  43. data/lib/karafka/env.rb +1 -2
  44. data/lib/karafka/helpers/interval_runner.rb +4 -1
  45. data/lib/karafka/instrumentation/assignments_tracker.rb +17 -0
  46. data/lib/karafka/instrumentation/monitor.rb +1 -1
  47. data/lib/karafka/instrumentation/notifications.rb +1 -0
  48. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +2 -3
  49. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +2 -3
  50. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +8 -9
  51. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +2 -3
  52. data/lib/karafka/messages/builders/batch_metadata.rb +1 -1
  53. data/lib/karafka/messages/builders/message.rb +1 -1
  54. data/lib/karafka/messages/messages.rb +2 -3
  55. data/lib/karafka/patches/rdkafka/bindings.rb +6 -6
  56. data/lib/karafka/patches/rdkafka/opaque.rb +1 -1
  57. data/lib/karafka/pro/active_job/dispatcher.rb +7 -3
  58. data/lib/karafka/pro/active_job/job_options_contract.rb +2 -4
  59. data/lib/karafka/pro/cleaner/messages/messages.rb +2 -3
  60. data/lib/karafka/pro/cli/contracts/server.rb +2 -4
  61. data/lib/karafka/pro/cli/parallel_segments/base.rb +1 -2
  62. data/lib/karafka/pro/cli/parallel_segments/collapse.rb +2 -2
  63. data/lib/karafka/pro/cli/parallel_segments/distribute.rb +2 -2
  64. data/lib/karafka/pro/connection/manager.rb +2 -2
  65. data/lib/karafka/pro/encryption/contracts/config.rb +4 -6
  66. data/lib/karafka/pro/encryption/messages/parser.rb +3 -3
  67. data/lib/karafka/pro/instrumentation/performance_tracker.rb +3 -3
  68. data/lib/karafka/pro/iterator/expander.rb +1 -1
  69. data/lib/karafka/pro/iterator/tpl_builder.rb +1 -1
  70. data/lib/karafka/pro/iterator.rb +2 -2
  71. data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +2 -3
  72. data/lib/karafka/pro/processing/coordinators/filters_applier.rb +3 -3
  73. data/lib/karafka/pro/processing/filters/delayer.rb +1 -1
  74. data/lib/karafka/pro/processing/filters/expirer.rb +1 -1
  75. data/lib/karafka/pro/processing/filters/throttler.rb +1 -1
  76. data/lib/karafka/pro/processing/schedulers/default.rb +2 -4
  77. data/lib/karafka/pro/processing/strategies/lrj/default.rb +2 -4
  78. data/lib/karafka/pro/processing/strategies/vp/default.rb +2 -4
  79. data/lib/karafka/pro/processing/subscription_groups_coordinator.rb +2 -3
  80. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +2 -4
  81. data/lib/karafka/pro/recurring_tasks/contracts/task.rb +2 -4
  82. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +6 -5
  83. data/lib/karafka/pro/recurring_tasks/schedule.rb +4 -6
  84. data/lib/karafka/pro/recurring_tasks.rb +8 -5
  85. data/lib/karafka/pro/routing/features/adaptive_iterator/contracts/topic.rb +2 -4
  86. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +2 -4
  87. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +2 -4
  88. data/lib/karafka/pro/routing/features/delaying/topic.rb +2 -4
  89. data/lib/karafka/pro/routing/features/direct_assignments/contracts/consumer_group.rb +4 -8
  90. data/lib/karafka/pro/routing/features/direct_assignments/contracts/topic.rb +5 -7
  91. data/lib/karafka/pro/routing/features/direct_assignments/subscription_group.rb +7 -6
  92. data/lib/karafka/pro/routing/features/direct_assignments/topic.rb +2 -2
  93. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +2 -4
  94. data/lib/karafka/pro/routing/features/expiring/topic.rb +2 -4
  95. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +2 -4
  96. data/lib/karafka/pro/routing/features/filtering/topic.rb +2 -3
  97. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +2 -4
  98. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +2 -4
  99. data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +3 -5
  100. data/lib/karafka/pro/routing/features/non_blocking_job/topic.rb +3 -3
  101. data/lib/karafka/pro/routing/features/offset_metadata/contracts/topic.rb +2 -4
  102. data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +2 -4
  103. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +3 -5
  104. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +2 -4
  105. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +2 -4
  106. data/lib/karafka/pro/routing/features/pausing/config.rb +26 -0
  107. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +17 -11
  108. data/lib/karafka/pro/routing/features/pausing/topic.rb +69 -8
  109. data/lib/karafka/pro/routing/features/periodic_job/contracts/topic.rb +2 -4
  110. data/lib/karafka/pro/routing/features/recurring_tasks/contracts/topic.rb +2 -4
  111. data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +2 -4
  112. data/lib/karafka/pro/routing/features/swarm/contracts/routing.rb +2 -4
  113. data/lib/karafka/pro/routing/features/swarm/contracts/topic.rb +6 -8
  114. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +2 -4
  115. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +2 -4
  116. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +2 -4
  117. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +2 -4
  118. data/lib/karafka/pro/scheduled_messages.rb +4 -6
  119. data/lib/karafka/pro/swarm/liveness_listener.rb +2 -2
  120. data/lib/karafka/processing/coordinator.rb +2 -4
  121. data/lib/karafka/processing/coordinators_buffer.rb +2 -3
  122. data/lib/karafka/processing/executor.rb +2 -3
  123. data/lib/karafka/processing/jobs/base.rb +2 -3
  124. data/lib/karafka/processing/workers_batch.rb +2 -3
  125. data/lib/karafka/railtie.rb +1 -0
  126. data/lib/karafka/routing/activity_manager.rb +2 -2
  127. data/lib/karafka/routing/builder.rb +5 -7
  128. data/lib/karafka/routing/consumer_group.rb +4 -6
  129. data/lib/karafka/routing/contracts/consumer_group.rb +3 -5
  130. data/lib/karafka/routing/contracts/routing.rb +2 -4
  131. data/lib/karafka/routing/contracts/topic.rb +2 -4
  132. data/lib/karafka/routing/features/active_job/contracts/topic.rb +2 -4
  133. data/lib/karafka/routing/features/active_job/topic.rb +6 -0
  134. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +2 -4
  135. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +3 -5
  136. data/lib/karafka/routing/features/deserializers/contracts/topic.rb +2 -4
  137. data/lib/karafka/routing/features/eofed/contracts/topic.rb +2 -4
  138. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +2 -4
  139. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +2 -4
  140. data/lib/karafka/routing/topics.rb +4 -9
  141. data/lib/karafka/server.rb +1 -1
  142. data/lib/karafka/setup/config.rb +66 -9
  143. data/lib/karafka/setup/contracts/config.rb +12 -10
  144. data/lib/karafka/setup/defaults_injector.rb +3 -2
  145. data/lib/karafka/setup/dsl.rb +2 -3
  146. data/lib/karafka/swarm/liveness_listener.rb +2 -3
  147. data/lib/karafka/swarm/supervisor.rb +1 -1
  148. data/lib/karafka/version.rb +1 -1
  149. data/lib/karafka.rb +2 -2
  150. metadata +8 -2
  151. data/.diffend.yml +0 -3
data/examples/payloads/json/sample_set_03/event_type_2.json ADDED
@@ -0,0 +1,263 @@
+ {
+ "connect.name": "production.public.automation_actions.Envelope",
+ "connect.version": 1,
+ "fields": [
+ {
+ "default": null,
+ "name": "before",
+ "type": [
+ "null",
+ {
+ "connect.name": "production.public.automation_actions.Value",
+ "fields": [
+ {
+ "default": "00000000-0000-0000-0000-000000000000",
+ "name": "id",
+ "type": {
+ "connect.default": "00000000-0000-0000-0000-000000000000",
+ "connect.name": "io.debezium.data.Uuid",
+ "connect.version": 1,
+ "type": "string"
+ }
+ },
+ {
+ "name": "automation_id",
+ "type": "int"
+ },
+ {
+ "default": null,
+ "name": "card_id",
+ "type": [
+ "null",
+ "int"
+ ]
+ },
+ {
+ "name": "organization_id",
+ "type": "int"
+ },
+ {
+ "default": null,
+ "name": "user_id",
+ "type": [
+ "null",
+ "int"
+ ]
+ },
+ {
+ "name": "action_name",
+ "type": "string"
+ },
+ {
+ "default": null,
+ "name": "finished_at",
+ "type": [
+ "null",
+ {
+ "connect.name": "io.debezium.time.MicroTimestamp",
+ "connect.version": 1,
+ "type": "long"
+ }
+ ]
+ },
+ {
+ "default": 0,
+ "name": "status",
+ "type": [
+ {
+ "connect.default": 0,
+ "type": "int"
+ },
+ "null"
+ ]
+ },
+ {
+ "default": null,
+ "name": "deleted_at",
+ "type": [
+ "null",
+ {
+ "connect.name": "io.debezium.time.MicroTimestamp",
+ "connect.version": 1,
+ "type": "long"
+ }
+ ]
+ },
+ {
+ "name": "created_at",
+ "type": {
+ "connect.name": "io.debezium.time.MicroTimestamp",
+ "connect.version": 1,
+ "type": "long"
+ }
+ },
+ {
+ "name": "updated_at",
+ "type": {
+ "connect.name": "io.debezium.time.MicroTimestamp",
+ "connect.version": 1,
+ "type": "long"
+ }
+ },
+ {
+ "default": null,
+ "name": "response",
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ {
+ "default": null,
+ "name": "executed",
+ "type": [
+ "null",
+ "boolean"
+ ]
+ }
+ ],
+ "name": "Value",
+ "type": "record"
+ }
+ ]
+ },
+ {
+ "default": null,
+ "name": "after",
+ "type": [
+ "null",
+ "Value"
+ ]
+ },
+ {
+ "name": "source",
+ "type": {
+ "connect.name": "io.debezium.connector.v2.postgresql.Source",
+ "fields": [
+ {
+ "name": "version",
+ "type": "string"
+ },
+ {
+ "name": "connector",
+ "type": "string"
+ },
+ {
+ "name": "name",
+ "type": "string"
+ },
+ {
+ "name": "ts_ms",
+ "type": "long"
+ },
+ {
+ "default": "false",
+ "name": "snapshot",
+ "type": [
+ {
+ "connect.default": "false",
+ "connect.name": "io.debezium.data.Enum",
+ "connect.parameters": {
+ "allowed": "true,last,false,incremental"
+ },
+ "connect.version": 1,
+ "type": "string"
+ },
+ "null"
+ ]
+ },
+ {
+ "name": "db",
+ "type": "string"
+ },
+ {
+ "default": null,
+ "name": "sequence",
+ "type": [
+ "null",
+ "string"
+ ]
+ },
+ {
+ "name": "schema",
+ "type": "string"
+ },
+ {
+ "name": "table",
+ "type": "string"
+ },
+ {
+ "default": null,
+ "name": "txId",
+ "type": [
+ "null",
+ "long"
+ ]
+ },
+ {
+ "default": null,
+ "name": "lsn",
+ "type": [
+ "null",
+ "long"
+ ]
+ },
+ {
+ "default": null,
+ "name": "xmin",
+ "type": [
+ "null",
+ "long"
+ ]
+ }
+ ],
+ "name": "Source",
+ "namespace": "io.debezium.connector.v2.postgresql",
+ "type": "record"
+ }
+ },
+ {
+ "name": "op",
+ "type": "string"
+ },
+ {
+ "default": null,
+ "name": "ts_ms",
+ "type": [
+ "null",
+ "long"
+ ]
+ },
+ {
+ "default": null,
+ "name": "transaction",
+ "type": [
+ "null",
+ {
+ "connect.name": "event.block",
+ "connect.version": 1,
+ "fields": [
+ {
+ "name": "id",
+ "type": "string"
+ },
+ {
+ "name": "total_order",
+ "type": "long"
+ },
+ {
+ "name": "data_collection_order",
+ "type": "long"
+ }
+ ],
+ "name": "block",
+ "namespace": "event",
+ "type": "record"
+ }
+ ]
+ }
+ ],
+ "name": "Envelope",
+ "namespace": "production.public.automation_actions",
+ "type": "record"
+ }
data/examples/payloads/json/sample_set_03/event_type_3.json ADDED
@@ -0,0 +1,41 @@
+ [
+ {
+ "timestamp": 1111111111111,
+ "timestampType": "TYPE",
+ "partition": 0,
+ "offset": 0,
+ "key": "0000000000000000000000000000000000000000000000000000000000000000",
+ "value": {
+ "id": "0000000000000000000000000000000000000000000000000000000000000000",
+ "type": "event_type",
+ "data": {
+ "user_id": 123456789,
+ "user_email": "user@example.com",
+ "org_id": 987654321,
+ "token_id": 111111111,
+ "token_type": "token_type",
+ "request_id": "request-id",
+ "request_host": "api.example.com",
+ "request_path": "/some/path",
+ "remote_ip": "0.0.0.0",
+ "request_graphql": true,
+ "user_agent": "client-library/x.y.z",
+ "request_method": "METHOD",
+ "graphql_data": {
+ "errored": false,
+ "complexity": 0,
+ "depth": 0,
+ "operation_name": null,
+ "operation_type": "operation",
+ "resources": ["resource"]
+ },
+ "response_code": "200",
+ "type": "custom"
+ },
+ "source": "some-source",
+ "version": 1,
+ "timestamp": "1111111111111"
+ },
+ "headers": []
+ }
+ ]
data/karafka.gemspec CHANGED
@@ -7,7 +7,7 @@ require 'karafka/version'
 
 Gem::Specification.new do |spec|
  spec.name = 'karafka'
- spec.version = ::Karafka::VERSION
+ spec.version = Karafka::VERSION
  spec.platform = Gem::Platform::RUBY
  spec.authors = ['Maciej Mensfeld']
  spec.email = %w[contact@karafka.io]
data/lib/active_job/queue_adapters/karafka_adapter.rb CHANGED
@@ -67,7 +67,7 @@ module ActiveJob
 
  # @return [Boolean] should we stop the job. Used by the ActiveJob continuation feature
  def stopping?
- Karafka::App.done?
+ ::Karafka::App.done?
  end
  end
  end
data/lib/karafka/active_job/consumer.rb CHANGED
@@ -6,6 +6,10 @@ module Karafka
  # This is the consumer for ActiveJob that eats the messages enqueued with it one after another.
  # It marks the offset after each message, so we make sure none of the jobs is executed twice
  class Consumer < ::Karafka::BaseConsumer
+ include Helpers::ConfigImporter.new(
+ deserializer: %i[internal active_job deserializer]
+ )
+
  # Executes the ActiveJob logic
  # @note ActiveJob does not support batches, so we just run one message after another
  def consume
@@ -42,7 +46,7 @@ module Karafka
  # We technically speaking could set this as deserializer and reference it from the
  # message instead of using the `#raw_payload`. This is not done on purpose to simplify
  # the ActiveJob setup here
- yield ::ActiveSupport::JSON.decode(job_message.raw_payload)
+ yield deserializer.deserialize(job_message)
  end
  end
  end
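
With this change the ActiveJob consumer resolves payload handling from the `internal.active_job.deserializer` setting instead of hardcoding `::ActiveSupport::JSON.decode`. A minimal sketch of pointing that setting at a custom implementation during app setup (`MyJobDeserializer` is an illustrative name, not part of Karafka):

  class KarafkaApp < Karafka::App
    setup do |config|
      config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
      # Assumes MyJobDeserializer < Karafka::ActiveJob::Deserializer is defined elsewhere
      config.internal.active_job.deserializer = MyJobDeserializer.new
    end
  end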
data/lib/karafka/active_job/current_attributes/job_wrapper.rb ADDED
@@ -0,0 +1,45 @@
+ # frozen_string_literal: true
+
+ module Karafka
+ module ActiveJob
+ module CurrentAttributes
+ # Simple wrapper that presents a job hash with current attributes injected.
+ #
+ # This wrapper exists to pass a modified job hash to the deserializer without
+ # modifying the original ActiveJob::Base instance. We cannot modify the job instance
+ # directly because:
+ #
+ # 1. Thread safety: Modifying job instances with singleton methods could cause
+ # concurrency issues in multi-threaded environments
+ # 2. Rails ownership: ActiveJob::Base is a Rails class we don't control, and
+ # monkey-patching it could break with Rails updates
+ # 3. Side effects: Modifying the job instance could affect other parts of the
+ # application that use the same job object
+ #
+ # The wrapper implements only the #serialize method that the Deserializer expects,
+ # returning our pre-computed hash with current attributes already injected.
+ #
+ # @example Using JobWrapper with a modified job hash
+ # job_hash = {
+ # 'job_class' => 'MyJob',
+ # 'arguments' => [1, 2, 3],
+ # 'cattr_0' => { 'user_id' => 123 }
+ # }
+ # wrapper = JobWrapper.new(job_hash)
+ # wrapper.serialize # => returns the job_hash
+ class JobWrapper
+ # @param job_hash [Hash] the job hash with current attributes already injected
+ def initialize(job_hash)
+ @job_hash = job_hash
+ end
+
+ # Returns the job hash with current attributes injected
+ #
+ # @return [Hash] the job hash with current attributes injected
+ def serialize
+ @job_hash
+ end
+ end
+ end
+ end
+ end
data/lib/karafka/active_job/current_attributes/loading.rb CHANGED
@@ -8,7 +8,7 @@ module Karafka
  module Loading
  # @param job_message [Karafka::Messages::Message] message with active job
  def with_deserialized_job(job_message)
- super(job_message) do |job|
+ super do |job|
  resetable = []
 
  _cattr_klasses.each do |key, cattr_klass_str|
data/lib/karafka/active_job/current_attributes/persistence.rb CHANGED
@@ -4,23 +4,35 @@ module Karafka
  module ActiveJob
  module CurrentAttributes
  # Module adding the current attributes persistence into the ActiveJob jobs
+ # This module wraps the Dispatcher#serialize_job to inject current attributes
  module Persistence
- # Alters the job serialization to inject the current attributes into the json before we
- # send it to Kafka
+ # Wraps the Dispatcher#serialize_job to inject current attributes before serialization
+ # This allows us to modify the job before it's serialized without modifying ActiveJob::Base
  #
- # @param job [ActiveJob::Base] job
+ # @param job [ActiveJob::Base] the original job to serialize
+ # @return [String] serialized job payload with current attributes injected
+ #
+ # @note This method creates a JobWrapper internally and passes it to the parent's
+ # serialize_job method. The wrapper is transparent to the deserializer.
  def serialize_job(job)
- json = super(job)
+ # Get the job hash
+ job_hash = job.serialize
 
+ # Inject current attributes
  _cattr_klasses.each do |key, cattr_klass_str|
- next if json.key?(key)
+ next if job_hash.key?(key)
 
  attrs = cattr_klass_str.constantize.attributes
 
- json[key] = attrs unless attrs.empty?
+ job_hash[key] = attrs unless attrs.empty?
  end
 
- json
+ # Wrap the modified hash in a simple object that implements #serialize
+ # This avoids modifying the original job instance
+ wrapper = JobWrapper.new(job_hash)
+
+ # Pass the wrapper to the deserializer
+ super(wrapper)
  end
  end
  end
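
The wrapping above only takes effect for attribute classes registered for persistence. A short sketch, assuming a hypothetical `Current` class and the `persist` registration API this module is loaded through:

  class Current < ActiveSupport::CurrentAttributes
    attribute :user_id
  end

  # After registration, serialize_job injects Current.attributes into the
  # job hash under a cattr_* key before the payload is produced to Kafka
  Karafka::ActiveJob::CurrentAttributes.persist('Current')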
data/lib/karafka/active_job/current_attributes.rb CHANGED
@@ -1,6 +1,7 @@
  # frozen_string_literal: true
 
  require 'active_support/current_attributes'
+ require_relative 'current_attributes/job_wrapper'
  require_relative 'current_attributes/loading'
  require_relative 'current_attributes/persistence'
 
data/lib/karafka/active_job/deserializer.rb ADDED
@@ -0,0 +1,61 @@
+ # frozen_string_literal: true
+
+ module Karafka
+ module ActiveJob
+ # Default deserializer for ActiveJob jobs
+ #
+ # @note Despite the name, this class handles both serialization (job to Kafka payload) and
+ # deserialization (Kafka message to job). It's called "Deserializer" to align with Karafka's
+ # naming conventions where message consumption is the primary concern.
+ #
+ # This class can be inherited and its methods can be overridden to support
+ # custom payload formats (e.g., Avro, Protobuf, MessagePack)
+ #
+ # @example Wrapping jobs in a custom envelope with metadata
+ # class EnvelopedJobDeserializer < Karafka::ActiveJob::Deserializer
+ # def serialize(job)
+ # # Wrap the job in an envelope with additional metadata
+ # envelope = {
+ # version: 1,
+ # produced_at: Time.now.iso8601,
+ # producer: 'my-app',
+ # payload: job.serialize
+ # }
+ # ::ActiveSupport::JSON.encode(envelope)
+ # end
+ #
+ # def deserialize(message)
+ # # Extract the job from the envelope
+ # envelope = ::ActiveSupport::JSON.decode(message.raw_payload)
+ #
+ # # Could validate envelope version, log metadata, etc.
+ # raise 'Unsupported version' if envelope['version'] != 1
+ #
+ # # Return the actual job data
+ # envelope['payload']
+ # end
+ # end
+ #
+ # # Configure in Karafka
+ # Karafka::App.config.internal.active_job.deserializer = EnvelopedJobDeserializer.new
+ class Deserializer
+ # Serializes an ActiveJob job into a string payload for Kafka
+ #
+ # @param job [ActiveJob::Base, #serialize] job to serialize. The job must respond to
+ # #serialize which returns a Hash of job attributes. When CurrentAttributes are used,
+ # this may be a JobWrapper instance instead of the original ActiveJob::Base.
+ # @return [String] serialized job payload
+ def serialize(job)
+ ::ActiveSupport::JSON.encode(job.serialize)
+ end
+
+ # Deserializes a Kafka message payload into an ActiveJob job hash
+ #
+ # @param message [Karafka::Messages::Message] message containing the job
+ # @return [Hash] deserialized job hash
+ def deserialize(message)
+ ::ActiveSupport::JSON.decode(message.raw_payload)
+ end
+ end
+ end
+ end
data/lib/karafka/active_job/dispatcher.rb CHANGED
@@ -4,6 +4,10 @@ module Karafka
  module ActiveJob
  # Dispatcher that sends the ActiveJob job to a proper topic based on the queue name
  class Dispatcher
+ include Helpers::ConfigImporter.new(
+ deserializer: %i[internal active_job deserializer]
+ )
+
  # Defaults for dispatching
  # They can be updated by using `#karafka_options` on the job
  DEFAULTS = {
@@ -18,7 +22,7 @@
  ::Karafka.producer.public_send(
  fetch_option(job, :dispatch_method, DEFAULTS),
  topic: job.queue_name,
- payload: ::ActiveSupport::JSON.encode(serialize_job(job))
+ payload: serialize_job(job)
  )
  end
 
@@ -34,7 +38,7 @@
 
  dispatches[d_method] << {
  topic: job.queue_name,
- payload: ::ActiveSupport::JSON.encode(serialize_job(job))
+ payload: serialize_job(job)
  }
  end
 
@@ -48,14 +52,18 @@
 
  # Raises info that the Karafka backend does not support scheduling jobs if someone wants to
  # schedule jobs in the future. It works for past and present because we want to support
- # things like continuation and `#retry_on` API with no wait and no jitter
+ # things like continuation and `#retry_on` API with no wait and no jitter.
  #
  # @param job [Object] job we cannot enqueue
  # @param timestamp [Time] time when job should run
  #
- # @note Karafka Pro supports future jobs
+ # @note Karafka Pro supports future jobs via the Scheduled Messages feature
+ #
+ # @note For ActiveJob Continuation to work without Pro, configure your continuable jobs:
+ # self.resume_options = { wait: 0 }
  #
- # @note In order for jobs to work with this you need to set jitter to false and no wait
+ # @note For `#retry_on` to work without Pro, configure with:
+ # retry_on SomeError, wait: 0, jitter: 0
  def dispatch_at(job, timestamp)
  # Dispatch at is used by some of the ActiveJob features that actually do not back-off
  # but things go via this API nonetheless.
@@ -64,13 +72,31 @@
  else
  raise NotImplementedError, <<~ERROR_MESSAGE
  This queueing backend does not support scheduling future jobs.
- Consider using Karafka Pro, which supports this via the Scheduled Messages feature.
+
+ If you're using ActiveJob Continuation, configure your jobs with:
+ self.resume_options = { wait: 0 }
+
+ If you're using retry_on, configure with:
+ retry_on SomeError, wait: 0, jitter: 0
+
+ For full support of delayed job execution, consider using Karafka Pro with Scheduled Messages.
  ERROR_MESSAGE
  end
  end
 
  private
 
+ # Serializes a job using the configured deserializer
+ # This method serves as an extension point and can be wrapped by modules like
+ # CurrentAttributes::Persistence
+ #
+ # @param job [ActiveJob::Base, CurrentAttributes::Persistence::JobWrapper] job to serialize.
+ # When CurrentAttributes are used, this may be a JobWrapper instead of the original job.
+ # @return [String] serialized job payload
+ def serialize_job(job)
+ deserializer.serialize(job)
+ end
+
  # @param job [ActiveJob::Base] job
  # @param key [Symbol] key we want to fetch
  # @param defaults [Hash]
@@ -81,12 +107,6 @@
  .karafka_options
  .fetch(key, defaults.fetch(key))
  end
-
- # @param job [ActiveJob::Base] job
- # @return [Hash] json representation of the job
- def serialize_job(job)
- job.serialize
- end
  end
  end
  end
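
The expanded error message spells out the OSS workaround: keep retries immediate so dispatching stays on the `#dispatch` path rather than a future-scheduled `#dispatch_at`. A sketch with an assumed job class:

  class ImportJob < ActiveJob::Base
    queue_as :imports

    # Immediate, non-jittered retries avoid future timestamps,
    # which this backend rejects
    retry_on Timeout::Error, wait: 0, jitter: 0
  end

Continuable jobs would analogously set `self.resume_options = { wait: 0 }`, as the raised message suggests.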
data/lib/karafka/active_job/job_options_contract.rb CHANGED
@@ -8,10 +8,8 @@
  # all in the same place
  class JobOptionsContract < Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('job_options')
  end
 
data/lib/karafka/admin/acl.rb CHANGED
@@ -98,8 +98,10 @@
  PERMISSION_TYPES_MAP
  ].freeze
 
- private_constant :RESOURCE_TYPES_MAP, :RESOURCE_PATTERNS_TYPE_MAP, :OPERATIONS_MAP,
- :PERMISSION_TYPES_MAP, :ALL_MAPS
+ private_constant(
+ :RESOURCE_TYPES_MAP, :RESOURCE_PATTERNS_TYPE_MAP, :OPERATIONS_MAP, :PERMISSION_TYPES_MAP,
+ :ALL_MAPS
+ )
 
  # Class level APIs that operate on Acl instances and/or return Acl instances.
  # @note For the sake of consistency all methods from this API return array of Acls
@@ -187,8 +189,10 @@
  end
  end
 
- attr_reader :resource_type, :resource_name, :resource_pattern_type, :principal, :host,
- :operation, :permission_type
+ attr_reader(
+ :resource_type, :resource_name, :resource_pattern_type, :principal, :host, :operation,
+ :permission_type
+ )
 
  # Initializes a new Acl instance with specified attributes.
  #
data/lib/karafka/admin/configs/config.rb CHANGED
@@ -71,10 +71,12 @@ module Karafka
  def synonym? = @synonym.positive?
 
  # @return [Hash] hash that we can use to operate with rdkafka
- def to_native_hash = {
- name: name,
- value: value
- }.freeze
+ def to_native_hash
+ {
+ name: name,
+ value: value
+ }.freeze
+ end
  end
  end
  end