sbmt-kafka_producer 3.0.0 → 3.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: '01390ee8af51126fefcebfc7a1cdebc8c61cff6660d064568b01e1508f1bc616'
-  data.tar.gz: 1402bc1e9e5b51ff2998411b6154711b9af3f583c539cab01deae04c9d8e29fc
+  metadata.gz: b9d0ac5904cd82c25700683f5b2955dd4b115328c6f854af93a4532407af48a2
+  data.tar.gz: af80b9874b6161f01b78e219a694850ab44b5cf234388d6a66ea0350af31e3a8
 SHA512:
-  metadata.gz: db848372986b9b6107b3911dad1f36c62f6532cc25d22d573bd2c1ab4afb37e4e3c27616a2ad70611c8f5006e00bf0dd6e36821cdc91acf96d010f3f22a741fe
-  data.tar.gz: 55082c9462c97377276ebc9c1b3406a409993b737828a0a13e72bd1c7eb09263ad68f61678b59b21178c6ada97665116b27d6fd09094b5e570f3546ac8ed9855
+  metadata.gz: 309463f869927645071f9cfa27ac92c0d10bb044f1ff9441ef3de54115fc05f2ea7286678acd98838278c0011fc047ec43a49408530c86ef18e626b54119ef84
+  data.tar.gz: f823b0dfba4f627953e25802d18bfe342646912a7e5c19285e8901b7c7d15512b37e114b0ee87bfb1c2de481abeab604b753d309ec96f8572c2dcdc1749f02d1
data/CHANGELOG.md CHANGED
@@ -13,6 +13,16 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 
 ### Fixed
 
+## [3.1.0] - 2024-09-13
+
+### Added
+
+- Synchronous publish success and error logs now carry their details as log tags
+
+### Fixed
+
+- Fixed the mock client used in tests
+
 ## [3.0.0] - 2024-08-27
 
 ## BREAKING
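For context on the "log tags" entry above, here is a minimal usage sketch of a synchronous publish after this release (the topic name and payload are hypothetical; the tag keys come from the BaseProducer diff below):

```ruby
# Hypothetical usage; the topic name and payload are illustrative only.
producer = Sbmt::KafkaProducer::BaseProducer.new(topic: "orders.created")
producer.sync_publish!({order_id: 1}.to_json)

# On success the producer now logs the fixed message
# "Message has been successfully sent to Kafka", with the delivery details
# (topic, partition, offset, produce_duration_ms) attached as log tags
# instead of being interpolated into the message string.
```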
@@ -5,14 +5,18 @@ module Sbmt
     class BaseProducer
       extend Dry::Initializer
 
+      MSG_SUCCESS = "Message has been successfully sent to Kafka"
+
       option :client, default: -> { KafkaClientFactory.default_client }
       option :topic
 
       def sync_publish!(payload, options = {})
-        report = around_publish do
-          client.produce_sync(payload: payload, **options.merge(topic: topic))
+        report, produce_duration = around_publish do
+          measure_time do
+            client.produce_sync(payload: payload, **options.merge(topic: topic))
+          end
         end
-        log_success(report)
+        log_success(report, produce_duration)
         true
       end
 
@@ -78,12 +82,19 @@ module Sbmt
       def log_error(error)
         return true if ignore_kafka_errors?
 
-        logger.error "KAFKA ERROR: #{format_exception_error(error)}\n#{error.backtrace.join("\n")}"
+        log_tags = {stacktrace: error.backtrace.join("\n")}
+
+        logger.tagged(log_tags) do
+          logger.send(:error, "KAFKA ERROR: #{format_exception_error(error)}")
+        end
+
         ErrorTracker.error(error)
       end
 
-      def log_success(report)
-        logger.info "Message has been successfully sent to Kafka - topic: #{report.topic_name}, partition: #{report.partition}, offset: #{report.offset}"
+      def log_success(report, produce_duration)
+        log_tags = {kafka: log_tags(report, produce_duration)}
+
+        log_with_tags(log_tags)
       end
 
       def format_exception_error(error)
@@ -100,6 +111,33 @@ module Sbmt
         error.respond_to?(:cause) && error.cause.present?
       end
 
+      def log_tags(report, produce_duration)
+        {
+          topic: report.topic_name,
+          partition: report.partition,
+          offset: report.offset,
+          produce_duration_ms: produce_duration
+        }
+      end
+
+      def log_with_tags(log_tags)
+        return unless logger.respond_to?(:tagged)
+
+        logger.tagged(log_tags) do
+          logger.send(:info, MSG_SUCCESS)
+        end
+      end
+
+      def measure_time
+        start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+        result = yield
+        end_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
+
+        elapsed_time = end_time - start_time
+
+        [result, elapsed_time]
+      end
+
       def config
         Config::Producer
       end
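The new `measure_time` helper above follows the standard monotonic-clock timing pattern. A self-contained sketch of the same idea (standalone code, not part of the gem), including the seconds-to-milliseconds conversion a caller would apply if the `produce_duration_ms` tag is meant to carry milliseconds:

```ruby
# Standalone sketch of the measure_time pattern.
# CLOCK_MONOTONIC is preferred over Time.now for durations because it is not
# affected by system clock adjustments (NTP, manual changes).
def measure_time
  started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
  result = yield
  elapsed_seconds = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started_at

  # The helper in the diff returns elapsed seconds as-is; multiply by 1000.0
  # here if the consumer expects milliseconds.
  [result, elapsed_seconds * 1000.0]
end

result, duration_ms = measure_time { :published }
# result      => :published
# duration_ms => a small Float
```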
@@ -20,6 +20,10 @@ module Sbmt
       def add(...)
         logger.add(...)
       end
+
+      def tagged(...)
+        logger.tagged(...)
+      end
     end
   end
 end
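The `tagged` delegation added above relies on the wrapped logger responding to `tagged`. A minimal standalone sketch of that behaviour using `ActiveSupport::TaggedLogging` (production setups typically use a structured/JSON logger where hash tags become fields; the plain-text output below is only illustrative):

```ruby
require "active_support"
require "active_support/tagged_logging"
require "logger"

logger = ActiveSupport::TaggedLogging.new(Logger.new($stdout))

# Everything logged inside the block carries the given tag.
logger.tagged("KAFKA") do
  logger.info "Message has been successfully sent to Kafka"
end
# => [KAFKA] Message has been successfully sent to Kafka
```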
@@ -1,8 +1,10 @@
 # frozen_string_literal: true
 
 class FakeWaterDropClient
+  Report = Struct.new(:topic_name, :partition, :offset)
+
   def produce_sync(*)
-    # no op
+    Report.new("fake_topic", 0, 0)
   end
 
   def produce_async(*)
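The fake client now returns a report-like struct because `log_success` reads `topic_name`, `partition` and `offset` from the delivery report. A hypothetical spec sketch showing how the fake is used (the spec setup below is an assumption, not taken from the gem's test suite):

```ruby
# Hypothetical spec; only FakeWaterDropClient and
# Sbmt::KafkaProducer::BaseProducer come from the diff above.
RSpec.describe Sbmt::KafkaProducer::BaseProducer do
  subject(:producer) do
    described_class.new(client: FakeWaterDropClient.new, topic: "fake_topic")
  end

  it "publishes synchronously without a real broker" do
    # produce_sync now returns Report.new("fake_topic", 0, 0), so log_success
    # can read topic_name/partition/offset from the fake delivery report.
    expect(producer.sync_publish!("payload")).to be(true)
  end
end
```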
@@ -2,6 +2,6 @@
 
 module Sbmt
   module KafkaProducer
-    VERSION = "3.0.0"
+    VERSION = "3.1.0"
   end
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: sbmt-kafka_producer
 version: !ruby/object:Gem::Version
-  version: 3.0.0
+  version: 3.1.0
 platform: ruby
 authors:
 - Kuper Ruby-Platform Team
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2024-08-29 00:00:00.000000000 Z
+date: 2024-09-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: anyway_config
@@ -426,7 +426,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.11
+rubygems_version: 3.1.6
 signing_key:
 specification_version: 4
 summary: Ruby gem for producing Kafka messages