dogstatsd-ruby 4.8.3 → 5.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: dc2f6d365efc8cf25456fb9dbd5ad526abe0ec61f2b63e9b79346c767aa3a3b0
-  data.tar.gz: 84a029741390b63cf0540de60a872c6b0820bf6ad4c1619cea120d71bc38c3bc
+  metadata.gz: f9f9d5b8de35189b467aae9471e7dcbd8a2913bfa94a0ebea428c4088ed1bf6e
+  data.tar.gz: 5a8ec414ba8e7b97dc5ff2d720b183ae12ce3e8f32dd022889276e02fa852ef2
 SHA512:
-  metadata.gz: 769645a11d455fceb1b0c4d7d718005d869c62546cd1c255f0a45b9da51a300500dfe2c1a4e601248f1e8ef6ce1e40c15ebaca55d6e55092babef1c153ccdcf0
-  data.tar.gz: 1429d390a2c52758f957dd4766cf30db5039986527982f3188fcc44d7c058431d9f85180b1af7e5dae8f82db31325115b0ea4efb1fced82d7585924b0702fd0c
+  metadata.gz: 0a89bc622a5fcb9c5f2376aa0573a3af843b3ce4952505b7d2076ef6f1ee58f6a8d2ef9b2498f919327bae37f05d43970c975a14a654019bef21cf26153d0080
+  data.tar.gz: 7efd1aaf9d2b4a6795422ad012da1226c43ed3c85aca1242d8c7defee5ef1d1e5decc34d0a5ce767f824cd8f3ad84182ea0b8c56c2fb0e5f0d1bb28b79cc52a8
data/README.md CHANGED
@@ -86,7 +86,7 @@ statsd = Datadog::Statsd.new('localhost', 8125, buffer_max_payload_size: 4096)
 
 ## Credits
 
-dogstatsd-ruby is forked from Rien Henrichs [original Statsd
+dogstatsd-ruby is forked from Rein Henrichs [original Statsd
 client](https://github.com/reinh/statsd).
 
 Copyright (c) 2011 Rein Henrichs. See LICENSE.txt for
data/lib/datadog/statsd.rb CHANGED
@@ -5,8 +5,10 @@ require_relative 'statsd/version'
 require_relative 'statsd/telemetry'
 require_relative 'statsd/udp_connection'
 require_relative 'statsd/uds_connection'
-require_relative 'statsd/batch'
+require_relative 'statsd/message_buffer'
 require_relative 'statsd/serialization'
+require_relative 'statsd/sender'
+require_relative 'statsd/forwarder'
 
 # = Datadog::Statsd: A DogStatsd client (https://www.datadoghq.com)
 #
@@ -26,12 +28,17 @@ require_relative 'statsd/serialization'
 #   statsd = Datadog::Statsd.new 'localhost', 8125, tags: 'tag1:true'
 module Datadog
   class Statsd
+    class Error < StandardError
+    end
+
     OK = 0
     WARNING = 1
     CRITICAL = 2
     UNKNOWN = 3
 
-    DEFAULT_BUFFER_SIZE = 8 * 1_024
+    UDP_DEFAULT_BUFFER_SIZE = 1_432
+    UDS_DEFAULT_BUFFER_SIZE = 8_192
+    DEFAULT_BUFFER_POOL_SIZE = Float::INFINITY
     MAX_EVENT_SIZE = 8 * 1_024
     # minimum flush interval for the telemetry in seconds
     DEFAULT_TELEMETRY_FLUSH_INTERVAL = 10
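The buffer constants above are a behavior change, not just a rename: instead of one 8 KiB default, the default datagram size now depends on the transport (the fallback logic lives in `Forwarder#initialize` further down in this diff). A minimal sketch of what that means when constructing a client; the UDS socket path below is an illustrative placeholder:

```ruby
require 'datadog/statsd'

# UDP: datagrams are buffered up to UDP_DEFAULT_BUFFER_SIZE (1_432 bytes) by default.
udp_client = Datadog::Statsd.new('localhost', 8125)

# UDS: datagrams are buffered up to UDS_DEFAULT_BUFFER_SIZE (8_192 bytes) by default.
uds_client = Datadog::Statsd.new(socket_path: '/var/run/datadog/dsd.socket')

# Either default can still be overridden explicitly (this value also appears in the README).
custom_client = Datadog::Statsd.new('localhost', 8125, buffer_max_payload_size: 4_096)
```

The new `Datadog::Statsd::Error` class is raised by the message buffer when a single message exceeds the payload limit and the overflow strategy is `:raise` (see `MessageBuffer#ensure_sendable!` below); per the code in this diff, that raise happens on the background sender thread, not at the call site.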
@@ -51,67 +58,59 @@ module Datadog
       serializer.global_tags
     end
 
-    # Buffer containing the statsd message before they are sent in batch
-    attr_reader :buffer
-
-    # Maximum buffer size in bytes before it is flushed
-    attr_reader :max_buffer_bytes
-
     # Default sample rate
     attr_reader :sample_rate
 
-    # Connection
-    attr_reader :connection
-
     # @param [String] host your statsd host
     # @param [Integer] port your statsd port
     # @option [String] namespace set a namespace to be prepended to every metric name
     # @option [Array<String>|Hash] tags tags to be added to every metric
     # @option [Logger] logger for debugging
-    # @option [Integer] max_buffer_bytes max bytes to buffer when using #batch
+    # @option [Integer] buffer_max_payload_size max bytes to buffer
+    # @option [Integer] buffer_max_pool_size max messages to buffer
     # @option [String] socket_path unix socket path
     # @option [Float] default sample rate if not overridden
     def initialize(
       host = nil,
       port = nil,
+      socket_path: nil,
+
       namespace: nil,
       tags: nil,
-      max_buffer_bytes: DEFAULT_BUFFER_SIZE,
-      socket_path: nil,
-      logger: nil,
       sample_rate: nil,
-      disable_telemetry: false,
+
+      buffer_max_payload_size: nil,
+      buffer_max_pool_size: nil,
+      buffer_overflowing_stategy: :drop,
+
+      logger: nil,
+
+      telemetry_enable: true,
       telemetry_flush_interval: DEFAULT_TELEMETRY_FLUSH_INTERVAL
     )
       unless tags.nil? || tags.is_a?(Array) || tags.is_a?(Hash)
-        raise ArgumentError, 'tags must be a Array<String> or a Hash'
+        raise ArgumentError, 'tags must be an array of string tags or a Hash'
       end
 
       @namespace = namespace
       @prefix = @namespace ? "#{@namespace}.".freeze : nil
-
       @serializer = Serialization::Serializer.new(prefix: @prefix, global_tags: tags)
+      @sample_rate = sample_rate
 
-      transport_type = socket_path.nil? ? :udp : :uds
+      @forwarder = Forwarder.new(
+        host: host,
+        port: port,
+        socket_path: socket_path,
 
-      @telemetry = Telemetry.new(disable_telemetry, telemetry_flush_interval,
         global_tags: tags,
-        transport_type: transport_type
-      )
+        logger: logger,
 
-      @connection = case transport_type
-                    when :udp
-                      UDPConnection.new(host, port, logger, telemetry)
-                    when :uds
-                      UDSConnection.new(socket_path, logger, telemetry)
-                    end
+        buffer_max_payload_size: buffer_max_payload_size,
+        buffer_max_pool_size: buffer_max_pool_size,
+        buffer_overflowing_stategy: buffer_overflowing_stategy,
 
-      @logger = logger
-
-      @sample_rate = sample_rate
-
-      # we reduce max_buffer_bytes by a the rough estimate of the telemetry payload
-      @batch = Batch.new(connection, (max_buffer_bytes - telemetry.estimate_max_size))
+        telemetry_flush_interval: telemetry_enable ? telemetry_flush_interval : nil,
+      )
     end
 
     # yield a new instance to a block and close it when done
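Put together, the reworked constructor reads like this; a sketch based only on the signature above (the agent address and values are placeholders, and `buffer_overflowing_stategy` is spelled exactly as in the source):

```ruby
require 'logger'
require 'datadog/statsd'

statsd = Datadog::Statsd.new(
  'localhost', 8125,                  # or socket_path: '/var/run/datadog/dsd.socket'
  namespace: 'my_app',
  tags: ['env:production'],           # Array<String> or Hash, per the check above
  sample_rate: 1.0,

  buffer_max_payload_size: 4_096,     # bytes per datagram
  buffer_max_pool_size: 1_000,        # messages buffered before a forced flush
  buffer_overflowing_stategy: :drop,  # or :raise to surface Datadog::Statsd::Error

  logger: Logger.new($stderr),

  telemetry_enable: true,             # replaces 4.x's disable_telemetry option
  telemetry_flush_interval: 10
)
```

`disable_telemetry` is gone: passing `telemetry_enable: false` makes the client hand the Forwarder a `nil` flush interval, which in turn skips creating a `Telemetry` instance entirely.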
@@ -270,9 +269,9 @@ module Datadog
     # @example Report a critical service check status
     #   $statsd.service_check('my.service.check', Statsd::CRITICAL, :tags=>['urgent'])
     def service_check(name, status, opts = EMPTY_OPTIONS)
-      telemetry.sent(service_checks: 1)
+      telemetry.sent(service_checks: 1) if telemetry
 
-      send_stat(serializer.to_service_check(name, status, opts))
+      forwarder.send_message(serializer.to_service_check(name, status, opts))
     end
 
     # This end point allows you to post events to the stream. You can tag them, set priority and even aggregate them with other events.
@@ -295,33 +294,48 @@ module Datadog
     # @example Report an awful event:
     #   $statsd.event('Something terrible happened', 'The end is near if we do nothing', :alert_type=>'warning', :tags=>['end_of_times','urgent'])
     def event(title, text, opts = EMPTY_OPTIONS)
-      telemetry.sent(events: 1)
+      telemetry.sent(events: 1) if telemetry
 
-      send_stat(serializer.to_event(title, text, opts))
-    end
-
-    # Send several metrics in the same UDP Packet
-    # They will be buffered and flushed when the block finishes
-    #
-    # @example Send several metrics in one packet:
-    #   $statsd.batch do |s|
-    #      s.gauge('users.online',156)
-    #      s.increment('page.views')
-    #    end
-    def batch
-      @batch.open do
-        yield self
-      end
+      forwarder.send_message(serializer.to_event(title, text, opts))
     end
 
     # Close the underlying socket
     def close
-      connection.close
+      forwarder.close
+    end
+
+    def sync_with_outbound_io
+      forwarder.sync_with_outbound_io
+    end
+
+    # Flush the buffer into the connection
+    def flush(flush_telemetry: false, sync: false)
+      forwarder.flush(flush_telemetry: flush_telemetry, sync: sync)
+    end
+
+    def telemetry
+      forwarder.telemetry
+    end
+
+    def host
+      forwarder.host
+    end
+
+    def port
+      forwarder.port
+    end
+
+    def socket_path
+      forwarder.socket_path
+    end
+
+    def transport_type
+      forwarder.transport_type
     end
 
     private
     attr_reader :serializer
-    attr_reader :telemetry
+    attr_reader :forwarder
 
     PROCESS_TIME_SUPPORTED = (RUBY_VERSION >= '2.1.0')
     EMPTY_OPTIONS = {}.freeze
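Because buffering is now always on and writes happen on a companion thread, the methods added above (`flush`, `sync_with_outbound_io`, `close`, plus the read-only accessors) are how callers control and inspect the outbound path. A sketch of typical lifecycle usage:

```ruby
statsd = Datadog::Statsd.new('localhost', 8125)

statsd.increment('page.views')
statsd.gauge('users.online', 156)

# Metrics sit in the buffer until it fills; force them onto the wire and
# optionally wait for the sender thread to catch up:
statsd.flush(flush_telemetry: true, sync: true)

# Read-only accessors added in this hunk:
statsd.transport_type  # => :udp
statsd.host            # => "localhost"
statsd.port            # => 8125
statsd.socket_path     # => nil (UDP transport)

# Close before the process exits so the sender thread is stopped and the
# socket is released.
statsd.close
```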
@@ -337,22 +351,14 @@ module Datadog
     end
 
     def send_stats(stat, delta, type, opts = EMPTY_OPTIONS)
-      telemetry.sent(metrics: 1)
+      telemetry.sent(metrics: 1) if telemetry
 
       sample_rate = opts[:sample_rate] || @sample_rate || 1
 
       if sample_rate == 1 || rand <= sample_rate
         full_stat = serializer.to_stat(stat, delta, type, tags: opts[:tags], sample_rate: sample_rate)
 
-        send_stat(full_stat)
-      end
-    end
-
-    def send_stat(message)
-      if @batch.open?
-        @batch.add(message)
-      else
-        @connection.write(message)
+        forwarder.send_message(full_stat)
       end
     end
   end
data/lib/datadog/statsd/connection.rb CHANGED
@@ -3,8 +3,9 @@
 module Datadog
   class Statsd
     class Connection
-      def initialize(telemetry)
+      def initialize(telemetry: nil, logger: nil)
         @telemetry = telemetry
+        @logger = logger
       end
 
       # Close the underlying socket
@@ -20,15 +21,11 @@ module Datadog
       def write(payload)
         logger.debug { "Statsd: #{payload}" } if logger
 
-        flush_telemetry = telemetry.flush?
-
-        payload += telemetry.flush if flush_telemetry
-
         send_message(payload)
 
-        telemetry.reset if flush_telemetry
+        telemetry.sent(packets: 1, bytes: payload.length) if telemetry
 
-        telemetry.sent(packets: 1, bytes: payload.length)
+        true
       rescue StandardError => boom
         # Try once to reconnect if the socket has been closed
         retries ||= 1
@@ -45,7 +42,7 @@ module Datadog
           end
         end
 
-        telemetry.dropped(packets: 1, bytes: payload.length)
+        telemetry.dropped(packets: 1, bytes: payload.length) if telemetry
         logger.error { "Statsd: #{boom.class} #{boom}" } if logger
         nil
       end
data/lib/datadog/statsd/forwarder.rb ADDED
@@ -0,0 +1,120 @@
+# frozen_string_literal: true
+
+module Datadog
+  class Statsd
+    class Forwarder
+      attr_reader :telemetry
+      attr_reader :transport_type
+
+      def initialize(
+        host: nil,
+        port: nil,
+        socket_path: nil,
+
+        buffer_max_payload_size: nil,
+        buffer_max_pool_size: nil,
+        buffer_overflowing_stategy: :drop,
+
+        telemetry_flush_interval: nil,
+        global_tags: [],
+
+        logger: nil
+      )
+        @transport_type = socket_path.nil? ? :udp : :uds
+
+        if telemetry_flush_interval
+          @telemetry = Telemetry.new(telemetry_flush_interval,
+            global_tags: global_tags,
+            transport_type: transport_type
+          )
+        end
+
+        @connection = case transport_type
+                      when :udp
+                        UDPConnection.new(host, port, logger: logger, telemetry: telemetry)
+                      when :uds
+                        UDSConnection.new(socket_path, logger: logger, telemetry: telemetry)
+                      end
+
+        # Initialize buffer
+        buffer_max_payload_size ||= (transport_type == :udp ? UDP_DEFAULT_BUFFER_SIZE : UDS_DEFAULT_BUFFER_SIZE)
+
+        if buffer_max_payload_size <= 0
+          raise ArgumentError, 'buffer_max_payload_size cannot be <= 0'
+        end
+
+        unless telemetry.nil? || telemetry.would_fit_in?(buffer_max_payload_size)
+          raise ArgumentError, "buffer_max_payload_size is not high enough to use telemetry (tags=(#{global_tags.inspect}))"
+        end
+
+        @buffer = MessageBuffer.new(@connection,
+          max_payload_size: buffer_max_payload_size,
+          max_pool_size: buffer_max_pool_size || DEFAULT_BUFFER_POOL_SIZE,
+          overflowing_stategy: buffer_overflowing_stategy,
+        )
+
+        @sender = Sender.new(buffer)
+        @sender.start
+      end
+
+      def send_message(message)
+        sender.add(message)
+
+        tick_telemetry
+      end
+
+      def sync_with_outbound_io
+        sender.rendez_vous
+      end
+
+      def flush(flush_telemetry: false, sync: false)
+        do_flush_telemetry if telemetry && flush_telemetry
+
+        sender.flush(sync: sync)
+      end
+
+      def host
+        return nil unless transport_type == :udp
+
+        connection.host
+      end
+
+      def port
+        return nil unless transport_type == :udp
+
+        connection.port
+      end
+
+      def socket_path
+        return nil unless transport_type == :uds
+
+        connection.socket_path
+      end
+
+      def close
+        sender.stop
+        connection.close
+      end
+
+      private
+      attr_reader :buffer
+      attr_reader :sender
+      attr_reader :connection
+
+      def do_flush_telemetry
+        telemetry_snapshot = telemetry.flush
+        telemetry.reset
+
+        telemetry_snapshot.each do |message|
+          sender.add(message)
+        end
+      end
+
+      def tick_telemetry
+        return nil unless telemetry
+
+        do_flush_telemetry if telemetry.should_flush?
+      end
+    end
+  end
+end
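Forwarder is internal plumbing: `Datadog::Statsd#initialize` builds one, and it wires the connection, the optional `Telemetry`, the `MessageBuffer`, and the `Sender` thread together. A rough sketch of that wiring using only the calls above; you would not normally instantiate it directly:

```ruby
forwarder = Datadog::Statsd::Forwarder.new(
  host: 'localhost',
  port: 8125,
  telemetry_flush_interval: 10,   # nil here means no Telemetry instance at all
  global_tags: ['env:dev'],
  logger: nil
)

forwarder.send_message('page.views:1|c')   # queued to the sender thread, then buffered
forwarder.flush(flush_telemetry: true, sync: true)
forwarder.close                            # stops the sender thread and closes the socket
```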
data/lib/datadog/statsd/message_buffer.rb ADDED
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+module Datadog
+  class Statsd
+    class MessageBuffer
+      PAYLOAD_SIZE_TOLERANCE = 0.05
+
+      def initialize(connection,
+        max_payload_size: nil,
+        max_pool_size: DEFAULT_BUFFER_POOL_SIZE,
+        overflowing_stategy: :drop
+      )
+        raise ArgumentError, 'max_payload_size keyword argument must be provided' unless max_payload_size
+        raise ArgumentError, 'max_pool_size keyword argument must be provided' unless max_pool_size
+
+        @connection = connection
+        @max_payload_size = max_payload_size
+        @max_pool_size = max_pool_size
+        @overflowing_stategy = overflowing_stategy
+
+        @buffer = String.new
+        @message_count = 0
+      end
+
+      def add(message)
+        message_size = message.bytesize
+
+        return nil unless message_size > 0 # to avoid adding empty messages to the buffer
+        return nil unless ensure_sendable!(message_size)
+
+        flush if should_flush?(message_size)
+
+        buffer << "\n" unless buffer.empty?
+        buffer << message
+
+        @message_count += 1
+
+        # flush when we're pretty sure that we won't be able
+        # to add another message to the buffer
+        flush if preemptive_flush?
+
+        true
+      end
+
+      def flush
+        return if buffer.empty?
+
+        connection.write(buffer)
+
+        buffer.clear
+        @message_count = 0
+      end
+
+      private
+      attr :max_payload_size
+      attr :max_pool_size
+
+      attr :overflowing_stategy
+
+      attr :connection
+      attr :buffer
+
+      def should_flush?(message_size)
+        return true if buffer.bytesize + 1 + message_size >= max_payload_size
+
+        false
+      end
+
+      def preemptive_flush?
+        @message_count == max_pool_size || buffer.bytesize > bytesize_threshold
+      end
+
+      def ensure_sendable!(message_size)
+        return true if message_size <= max_payload_size
+
+        if overflowing_stategy == :raise
+          raise Error, 'Message too big for payload limit'
+        end
+
+        false
+      end
+
+      def bytesize_threshold
+        @bytesize_threshold ||= (max_payload_size - PAYLOAD_SIZE_TOLERANCE * max_payload_size).to_i
+      end
+    end
+  end
+end
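The buffer itself is synchronous and only needs something that responds to `write`, which makes its batching rules easy to observe in isolation. A small sketch with a stand-in connection (the `FakeConnection` class is purely illustrative; payloads are duplicated because the buffer string is cleared and reused after each flush):

```ruby
class FakeConnection
  attr_reader :written

  def initialize
    @written = []
  end

  def write(payload)
    @written << payload.dup  # dup: the buffer string is reused after flush
  end
end

connection = FakeConnection.new
buffer = Datadog::Statsd::MessageBuffer.new(connection,
  max_payload_size: 32,
  overflowing_stategy: :drop)

buffer.add('page.views:1|c')      # 14 bytes, held in memory
buffer.add('users.online:156|g')  # would push the payload past 32 bytes, so the first datagram is flushed
buffer.flush                      # force out whatever is left

connection.written  # => ["page.views:1|c", "users.online:156|g"]
```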
data/lib/datadog/statsd/sender.rb ADDED
@@ -0,0 +1,110 @@
+# frozen_string_literal: true
+
+module Datadog
+  class Statsd
+    class Sender
+      CLOSEABLE_QUEUES = Queue.instance_methods.include?(:close)
+
+      def initialize(message_buffer)
+        @message_buffer = message_buffer
+      end
+
+      def flush(sync: false)
+        raise ArgumentError, 'Start sender first' unless message_queue
+
+        message_queue.push(:flush)
+
+        rendez_vous if sync
+      end
+
+      def rendez_vous
+        # Initialize and get the thread's sync queue
+        queue = (Thread.current[:statsd_sync_queue] ||= Queue.new)
+        # tell sender-thread to notify us in the current
+        # thread's queue
+        message_queue.push(queue)
+        # wait for the sender thread to send a message
+        # once the flush is done
+        queue.pop
+      end
+
+      def add(message)
+        raise ArgumentError, 'Start sender first' unless message_queue
+
+        message_queue << message
+      end
+
+      def start
+        raise ArgumentError, 'Sender already started' if message_queue
+
+        # initialize message queue for background thread
+        @message_queue = Queue.new
+        # start background thread
+        @sender_thread = Thread.new(&method(:send_loop))
+      end
+
+      if CLOSEABLE_QUEUES
+        def stop(join_worker: true)
+          message_queue.close if message_queue
+          sender_thread.join if sender_thread && join_worker
+        end
+      else
+        def stop(join_worker: true)
+          message_queue << :close if message_queue
+          sender_thread.join if sender_thread && join_worker
+        end
+      end
+
+      private
+
+      attr_reader :message_buffer
+
+      attr_reader :message_queue
+      attr_reader :sender_thread
+
+      if CLOSEABLE_QUEUES
+        def send_loop
+          until (message = message_queue.pop).nil? && message_queue.closed?
+            # skip if message is nil, e.g. when message_queue
+            # is empty and closed
+            next unless message
+
+            case message
+            when :flush
+              message_buffer.flush
+            when Queue
+              message.push(:go_on)
+            else
+              message_buffer.add(message)
+            end
+          end
+
+          @message_queue = nil
+          @sender_thread = nil
+        end
+      else
+        def send_loop
+          loop do
+            message = message_queue.pop
+
+            next unless message
+
+            case message
+            when :close
+              break
+            when :flush
+              message_buffer.flush
+            when Queue
+              message.push(:go_on)
+            else
+              message_buffer.add(message)
+            end
+          end
+
+          @message_queue = nil
+          @sender_thread = nil
+        end
+      end
+    end
+  end
+end
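Sender is the companion thread: producers push serialized messages and control tokens onto a Queue, and the background thread drains them into the MessageBuffer (`:flush` requests a buffer flush; pushing a per-thread Queue is the rendez-vous used to wait for the worker). A sketch of driving it directly, reusing the stand-in connection from the previous example; normally the Forwarder does all of this:

```ruby
buffer = Datadog::Statsd::MessageBuffer.new(connection, max_payload_size: 1_432)
sender = Datadog::Statsd::Sender.new(buffer)

sender.start                   # spawn the background thread
sender.add('page.views:1|c')   # enqueued; the worker thread adds it to the buffer
sender.flush(sync: true)       # push :flush, then rendez_vous until the worker has processed it
sender.stop                    # close the queue (or push :close) and join the thread
```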
data/lib/datadog/statsd/telemetry.rb CHANGED
@@ -11,10 +11,11 @@ module Datadog
      attr_reader :bytes_dropped
      attr_reader :packets_sent
      attr_reader :packets_dropped
-      attr_reader :estimate_max_size
 
-      def initialize(disabled, flush_interval, global_tags: [], transport_type: :udp)
-        @disabled = disabled
+      # Rough estimation of maximum telemetry message size without tags
+      MAX_TELEMETRY_MESSAGE_SIZE_WT_TAGS = 50 # bytes
+
+      def initialize(flush_interval, global_tags: [], transport_type: :udp)
        @flush_interval = flush_interval
        @global_tags = global_tags
        @transport_type = transport_type
@@ -27,15 +28,10 @@ module Datadog
          client_version: VERSION,
          client_transport: transport_type,
        ).format(global_tags)
+      end
 
-        # estimate_max_size is an estimation or the maximum size of the
-        # telemetry payload. Since we don't want our packet to go over
-        # 'max_buffer_bytes', we have to adjust with the size of the telemetry
-        # (and any tags used). The telemetry payload size will change depending
-        # on the actual value of metrics: metrics received, packet dropped,
-        # etc. This is why we add a 63bytes margin: 9 bytes for each of the 7
-        # telemetry metrics.
-        @estimate_max_size = disabled ? 0 : flush.length + 9 * 7
+      def would_fit_in?(max_buffer_payload_size)
+        MAX_TELEMETRY_MESSAGE_SIZE_WT_TAGS + serialized_tags.size < max_buffer_payload_size
      end
 
      def reset
@@ -63,27 +59,29 @@ module Datadog
        @packets_dropped += packets
      end
 
-      def flush?
+      def should_flush?
        @next_flush_time < now_in_s
      end
 
      def flush
-        return '' if @disabled
-
-        # using shorthand syntax to reduce the garbage collection
-        %Q(
-datadog.dogstatsd.client.metrics:#{@metrics}|#{COUNTER_TYPE}|##{serialized_tags}
-datadog.dogstatsd.client.events:#{@events}|#{COUNTER_TYPE}|##{serialized_tags}
-datadog.dogstatsd.client.service_checks:#{@service_checks}|#{COUNTER_TYPE}|##{serialized_tags}
-datadog.dogstatsd.client.bytes_sent:#{@bytes_sent}|#{COUNTER_TYPE}|##{serialized_tags}
-datadog.dogstatsd.client.bytes_dropped:#{@bytes_dropped}|#{COUNTER_TYPE}|##{serialized_tags}
-datadog.dogstatsd.client.packets_sent:#{@packets_sent}|#{COUNTER_TYPE}|##{serialized_tags}
-datadog.dogstatsd.client.packets_dropped:#{@packets_dropped}|#{COUNTER_TYPE}|##{serialized_tags})
+        [
+          sprintf(pattern, 'metrics', @metrics),
+          sprintf(pattern, 'events', @events),
+          sprintf(pattern, 'service_checks', @service_checks),
+          sprintf(pattern, 'bytes_sent', @bytes_sent),
+          sprintf(pattern, 'bytes_dropped', @bytes_dropped),
+          sprintf(pattern, 'packets_sent', @packets_sent),
+          sprintf(pattern, 'packets_dropped', @packets_dropped),
+        ]
      end
 
      private
      attr_reader :serialized_tags
 
+      def pattern
+        @pattern ||= "datadog.dogstatsd.client.%s:%d|#{COUNTER_TYPE}|##{serialized_tags}"
+      end
+
      if Kernel.const_defined?('Process') && Process.respond_to?(:clock_gettime)
        def now_in_s
          Process.clock_gettime(Process::CLOCK_MONOTONIC, :second)
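Telemetry changes shape accordingly: the `disabled` flag is gone (the Forwarder simply never creates a Telemetry when telemetry is off), `flush` returns an array of pre-formatted counter messages instead of one multi-line string, and `would_fit_in?` replaces the old `estimate_max_size` bookkeeping. A brief sketch using only the methods visible in these hunks:

```ruby
telemetry = Datadog::Statsd::Telemetry.new(10, global_tags: ['env:dev'], transport_type: :udp)

telemetry.sent(metrics: 3)
telemetry.sent(packets: 1, bytes: 120)

telemetry.would_fit_in?(1_432)  # => true unless the tag set is enormous
telemetry.should_flush?         # becomes true once the 10 s flush interval has elapsed
telemetry.flush                 # => array of 7 "datadog.dogstatsd.client.*" counter lines
telemetry.reset                 # zero the counters for the next interval
```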
data/lib/datadog/statsd/udp_connection.rb CHANGED
@@ -14,11 +14,11 @@ module Datadog
       # StatsD port. Defaults to 8125.
       attr_reader :port
 
-      def initialize(host, port, logger, telemetry)
-        super(telemetry)
+      def initialize(host, port, **kwargs)
+        super(**kwargs)
+
         @host = host || ENV.fetch('DD_AGENT_HOST', DEFAULT_HOST)
         @port = port || ENV.fetch('DD_DOGSTATSD_PORT', DEFAULT_PORT).to_i
-        @logger = logger
       end
 
       private
data/lib/datadog/statsd/uds_connection.rb CHANGED
@@ -10,10 +10,10 @@ module Datadog
       # DogStatsd unix socket path
       attr_reader :socket_path
 
-      def initialize(socket_path, logger, telemetry)
-        super(telemetry)
+      def initialize(socket_path, **kwargs)
+        super(**kwargs)
+
         @socket_path = socket_path
-        @logger = logger
       end
 
       private
data/lib/datadog/statsd/version.rb CHANGED
@@ -4,6 +4,6 @@ require_relative 'connection'
 
 module Datadog
   class Statsd
-    VERSION = '4.8.3'
+    VERSION = '5.0.0'
   end
 end
metadata CHANGED
@@ -1,16 +1,17 @@
 --- !ruby/object:Gem::Specification
 name: dogstatsd-ruby
 version: !ruby/object:Gem::Version
-  version: 4.8.3
+  version: 5.0.0
 platform: ruby
 authors:
 - Rein Henrichs
+- Karim Bogtob
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-02-22 00:00:00.000000000 Z
+date: 2021-04-07 00:00:00.000000000 Z
 dependencies: []
-description: A Ruby DogStastd client
+description: A Ruby DogStatsd client
 email: code@datadoghq.com
 executables: []
 extensions: []
@@ -21,8 +22,10 @@ files:
 - LICENSE.txt
 - README.md
 - lib/datadog/statsd.rb
-- lib/datadog/statsd/batch.rb
 - lib/datadog/statsd/connection.rb
+- lib/datadog/statsd/forwarder.rb
+- lib/datadog/statsd/message_buffer.rb
+- lib/datadog/statsd/sender.rb
 - lib/datadog/statsd/serialization.rb
 - lib/datadog/statsd/serialization/event_serializer.rb
 - lib/datadog/statsd/serialization/serializer.rb
@@ -38,9 +41,9 @@ licenses:
 - MIT
 metadata:
   bug_tracker_uri: https://github.com/DataDog/dogstatsd-ruby/issues
-  changelog_uri: https://github.com/DataDog/dogstatsd-ruby/blob/v4.8.3/CHANGELOG.md
-  documentation_uri: https://www.rubydoc.info/gems/dogstatsd-ruby/4.8.3
-  source_code_uri: https://github.com/DataDog/dogstatsd-ruby/tree/v4.8.3
+  changelog_uri: https://github.com/DataDog/dogstatsd-ruby/blob/v5.0.0/CHANGELOG.md
+  documentation_uri: https://www.rubydoc.info/gems/dogstatsd-ruby/5.0.0
+  source_code_uri: https://github.com/DataDog/dogstatsd-ruby/tree/v5.0.0
 post_install_message:
 rdoc_options: []
 require_paths:
data/lib/datadog/statsd/batch.rb DELETED
@@ -1,56 +0,0 @@
-# frozen_string_literal: true
-
-module Datadog
-  class Statsd
-    class Batch
-      def initialize(connection, max_buffer_bytes)
-        @connection = connection
-        @max_buffer_bytes = max_buffer_bytes
-        @depth = 0
-        reset
-      end
-
-      def open
-        @depth += 1
-
-        yield
-      ensure
-        @depth -= 1
-        flush if !open?
-      end
-
-      def open?
-        @depth > 0
-      end
-
-      def add(message)
-        message_bytes = message.bytesize
-
-        unless @buffer_bytes == 0
-          if @buffer_bytes + 1 + message_bytes >= @max_buffer_bytes
-            flush
-          else
-            @buffer << "\n"
-            @buffer_bytes += 1
-          end
-        end
-
-        @buffer << message
-        @buffer_bytes += message_bytes
-      end
-
-      def flush
-        return if @buffer_bytes == 0
-        @connection.write(@buffer)
-        reset
-      end
-
-      private
-
-      def reset
-        @buffer = String.new
-        @buffer_bytes = 0
-      end
-    end
-  end
-end
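With `Batch` removed there is no direct replacement for the 4.x block API: buffering is now implicit and a background thread does the writing. A before/after sketch of the migration implied by this diff:

```ruby
# dogstatsd-ruby 4.x: explicit batching
statsd.batch do |s|
  s.gauge('users.online', 156)
  s.increment('page.views')
end

# dogstatsd-ruby 5.0: metrics are buffered automatically and written by the
# sender thread; flush explicitly when the data must go out now, and close
# the client before the process exits.
statsd.gauge('users.online', 156)
statsd.increment('page.views')
statsd.flush(sync: true)
statsd.close
```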