statsd-instrument 3.8.0 → 3.9.0

@@ -82,29 +82,49 @@ module StatsD
       env.fetch("STATSD_ADDR", "localhost:8125")
     end
 
+    def statsd_socket_path
+      env.fetch("STATSD_SOCKET_PATH", "")
+    end
+
     def statsd_default_tags
       env.key?("STATSD_DEFAULT_TAGS") ? env.fetch("STATSD_DEFAULT_TAGS").split(",") : nil
     end
 
     def statsd_buffer_capacity
-      Integer(env.fetch("STATSD_BUFFER_CAPACITY", StatsD::Instrument::BatchedUDPSink::DEFAULT_BUFFER_CAPACITY))
+      Integer(env.fetch("STATSD_BUFFER_CAPACITY", StatsD::Instrument::BatchedSink::DEFAULT_BUFFER_CAPACITY))
     end
 
     def statsd_batching?
      statsd_buffer_capacity > 0 && Float(env.fetch("STATSD_FLUSH_INTERVAL", 1.0)) > 0.0
     end
 
+    def statsd_uds_send?
+      !statsd_socket_path.empty?
+    end
+
     def statsd_max_packet_size
-      Float(env.fetch("STATSD_MAX_PACKET_SIZE", StatsD::Instrument::BatchedUDPSink::DEFAULT_MAX_PACKET_SIZE))
+      if statsd_uds_send?
+        return Float(env.fetch("STATSD_MAX_PACKET_SIZE", StatsD::Instrument::UdsConnection::DEFAULT_MAX_PACKET_SIZE))
+      end
+
+      Float(env.fetch("STATSD_MAX_PACKET_SIZE", StatsD::Instrument::UdpConnection::DEFAULT_MAX_PACKET_SIZE))
     end
 
     def statsd_batch_statistics_interval
       Integer(env.fetch(
         "STATSD_BATCH_STATISTICS_INTERVAL",
-        StatsD::Instrument::BatchedUDPSink::DEFAULT_STATISTICS_INTERVAL,
+        StatsD::Instrument::BatchedSink::DEFAULT_STATISTICS_INTERVAL,
       ))
     end
 
+    def experimental_aggregation_enabled?
+      env.key?("STATSD_ENABLE_AGGREGATION")
+    end
+
+    def aggregation_interval
+      Float(env.fetch("STATSD_AGGREGATION_INTERVAL", 2.0))
+    end
+
     def client
       StatsD::Instrument::Client.from_env(self)
     end
@@ -112,16 +132,24 @@ module StatsD
     def default_sink_for_environment
       case environment
       when "production", "staging"
+        connection = if statsd_uds_send?
+          StatsD::Instrument::UdsConnection.new(statsd_socket_path)
+        else
+          host, port = statsd_addr.split(":")
+          StatsD::Instrument::UdpConnection.new(host, port.to_i)
+        end
+
+        sink = StatsD::Instrument::Sink.new(connection)
         if statsd_batching?
-          StatsD::Instrument::BatchedUDPSink.for_addr(
-            statsd_addr,
+          # if we are batching, wrap the sink in a batched sink
+          return StatsD::Instrument::BatchedSink.new(
+            sink,
             buffer_capacity: statsd_buffer_capacity,
             max_packet_size: statsd_max_packet_size,
             statistics_interval: statsd_batch_statistics_interval,
           )
-        else
-          StatsD::Instrument::UDPSink.for_addr(statsd_addr)
         end
+        sink
       when "test"
         StatsD::Instrument::NullSink.new
       else
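
Taken together, the new environment hooks above make the transport selectable at deploy time: setting STATSD_SOCKET_PATH switches the sink from UDP to a Unix domain socket, and STATSD_ENABLE_AGGREGATION/STATSD_AGGREGATION_INTERVAL opt into the experimental client-side aggregation. A minimal sketch of driving this purely through environment variables (the socket path is illustrative, not part of the diff):

    # Hypothetical configuration; only the variable names come from this release.
    env = StatsD::Instrument::Environment.new(
      "STATSD_ENV" => "production",
      "STATSD_SOCKET_PATH" => "/var/run/statsd.sock", # illustrative path
    )
    client = env.client # a BatchedSink wrapping a UdsConnection, since batching is on by default
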
@@ -2,18 +2,21 @@
 
 module StatsD
   module Instrument
-    # @note This class is part of the new Client implementation that is intended
-    #   to become the new default in the next major release of this library.
-    class UDPSink
+    class Sink
       class << self
         def for_addr(addr)
-          host, port_as_string = addr.split(":", 2)
-          new(host, Integer(port_as_string))
+          # if addr is host:port
+          if addr.include?(":")
+            host, port_as_string = addr.split(":", 2)
+            connection = UdpConnection.new(host, Integer(port_as_string))
+            new(connection)
+          else
+            connection = UdsConnection.new(addr)
+            new(connection)
+          end
         end
       end
 
-      attr_reader :host, :port
-
      FINALIZER = ->(object_id) do
        Thread.list.each do |thread|
          if (store = thread["StatsD::UDPSink"])
@@ -22,10 +25,9 @@ module StatsD
        end
      end
 
-      def initialize(host, port)
+      def initialize(connection = nil)
        ObjectSpace.define_finalizer(self, FINALIZER)
-        @host = host
-        @port = port
+        @connection = connection
      end
 
      def sample?(sample_rate)
@@ -35,12 +37,12 @@ module StatsD
      def <<(datagram)
        retried = false
        begin
-          socket.send(datagram, 0)
+          connection.send_datagram(datagram)
        rescue SocketError, IOError, SystemCallError => error
          StatsD.logger.debug do
-            "[StatsD::Instrument::UDPSink] Resetting connection because of #{error.class}: #{error.message}"
+            "[#{self.class.name}] Resetting connection because of #{error.class}: #{error.message}"
          end
-          invalidate_socket
+          invalidate_connection
          if retried
            StatsD.logger.warn do
              "[#{self.class.name}] Events were dropped because of #{error.class}: #{error.message}"
@@ -53,27 +55,34 @@ module StatsD
        self
      end
 
-      def flush(blocking:)
+      def flush(blocking: false)
        # noop
      end
 
-      private
+      def connection_type
+        connection.class.name
+      end
 
-      def invalidate_socket
-        socket = thread_store.delete(object_id)
-        socket&.close
+      def connection
+        thread_store[object_id] ||= @connection
      end
 
-      def socket
-        thread_store[object_id] ||= begin
-          socket = UDPSocket.new
-          socket.connect(@host, @port)
-          socket
-        end
+      def host
+        connection.host
+      end
+
+      def port
+        connection.port
+      end
+
+      private
+
+      def invalidate_connection
+        connection&.close
      end
 
      def thread_store
-        Thread.current["StatsD::UDPSink"] ||= {}
+        Thread.current["StatsD::Sink"] ||= {}
      end
    end
  end
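
With the socket handling extracted into connection objects, the renamed Sink class (previously UDPSink) picks the transport in for_addr from the shape of the address: a "host:port" string yields a UdpConnection, anything else is treated as a socket path and yields a UdsConnection. A short sketch of the resulting API, with illustrative addresses:

    udp_sink = StatsD::Instrument::Sink.for_addr("127.0.0.1:8125")   # UdpConnection
    uds_sink = StatsD::Instrument::Sink.for_addr("/tmp/statsd.sock") # UdsConnection

    udp_sink << "counter:1|c"  # delegates to connection.send_datagram
    udp_sink.connection_type   # => "StatsD::Instrument::UdpConnection"
    udp_sink.host              # => "127.0.0.1"; the UDS sink returns nil here
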
@@ -0,0 +1,39 @@
+# frozen_string_literal: true
+
+module StatsD
+  module Instrument
+    class UdpConnection
+      DEFAULT_MAX_PACKET_SIZE = 1_472
+
+      attr_reader :host, :port
+
+      def initialize(host, port)
+        @host = host
+        @port = port
+      end
+
+      def send_datagram(message)
+        socket.send(message, 0)
+      end
+
+      def close
+        @socket&.close
+        @socket = nil
+      end
+
+      def type
+        :udp
+      end
+
+      private
+
+      def socket
+        @socket ||= begin
+          socket = UDPSocket.new
+          socket.connect(@host, @port)
+          socket
+        end
+      end
+    end
+  end
+end
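
UdpConnection opens and connects its socket lazily on first use, so close simply drops the socket and the next send_datagram reconnects; this is what Sink#invalidate_connection relies on when it retries after a SocketError. A minimal sketch with an illustrative address:

    conn = StatsD::Instrument::UdpConnection.new("127.0.0.1", 8125)
    conn.send_datagram("counter:1|c") # socket is created and connected here
    conn.close                        # drops the socket
    conn.send_datagram("counter:1|c") # lazily reopens and reconnects
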
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module StatsD
+  module Instrument
+    class UdsConnection
+      DEFAULT_MAX_PACKET_SIZE = 8_192
+
+      def initialize(socket_path, max_packet_size: DEFAULT_MAX_PACKET_SIZE)
+        if max_packet_size <= 0
+          StatsD.logger.warn do
+            "[StatsD::Instrument::UdsConnection] max_packet_size must be greater than 0, " \
+              "using default: #{DEFAULT_MAX_PACKET_SIZE}"
+          end
+        end
+        @socket_path = socket_path
+        @max_packet_size = max_packet_size
+      end
+
+      def send_datagram(message)
+        socket.sendmsg(message, 0)
+      end
+
+      def close
+        @socket&.close
+        @socket = nil
+      end
+
+      def host
+        nil
+      end
+
+      def port
+        nil
+      end
+
+      def type
+        :uds
+      end
+
+      private
+
+      def socket
+        @socket ||= begin
+          socket = Socket.new(Socket::AF_UNIX, Socket::SOCK_DGRAM)
+          socket.setsockopt(Socket::SOL_SOCKET, Socket::SO_SNDBUF, @max_packet_size.to_i)
+          socket.connect(Socket.pack_sockaddr_un(@socket_path))
+          socket
+        end
+      end
+    end
+  end
+end
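
UdsConnection mirrors the same interface but speaks SOCK_DGRAM over a Unix domain socket, sizes its send buffer to max_packet_size, and returns nil for host and port so Sink exposes a uniform surface across transports. Note the larger default packet budget: 8_192 bytes versus 1_472 for UDP (the usual 1500-byte Ethernet MTU minus IP and UDP headers). A minimal usage sketch (the path is illustrative):

    conn = StatsD::Instrument::UdsConnection.new("/tmp/statsd.sock")
    conn.send_datagram("counter:1|c") # sendmsg over the datagram socket
    conn.close
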
@@ -2,6 +2,6 @@
 
 module StatsD
   module Instrument
-    VERSION = "3.8.0"
+    VERSION = "3.9.0"
   end
 end

@@ -201,7 +201,10 @@ module StatsD
    #
    # @param method (see #statsd_measure)
    # @param name (see #statsd_measure)
-    # @param metric_options (see #statsd_measure)
+    # @param sample_rate
+    # @param tags
+    # @param no_prefix
+    # @param client
    # @return [void]
    def statsd_count(method, name, sample_rate: nil, tags: nil, no_prefix: false, client: nil)
      add_to_method(method, name, :count) do

@@ -386,19 +389,22 @@ end
 require "statsd/instrument/version"
 require "statsd/instrument/client"
 require "statsd/instrument/datagram"
+require "statsd/instrument/aggregator"
 require "statsd/instrument/dogstatsd_datagram"
 require "statsd/instrument/datagram_builder"
 require "statsd/instrument/statsd_datagram_builder"
 require "statsd/instrument/dogstatsd_datagram_builder"
 require "statsd/instrument/null_sink"
-require "statsd/instrument/udp_sink"
-require "statsd/instrument/batched_udp_sink"
 require "statsd/instrument/capture_sink"
 require "statsd/instrument/log_sink"
 require "statsd/instrument/environment"
 require "statsd/instrument/helpers"
 require "statsd/instrument/assertions"
 require "statsd/instrument/expectation"
+require "statsd/instrument/uds_connection"
+require "statsd/instrument/udp_connection"
+require "statsd/instrument/sink"
+require "statsd/instrument/batched_sink"
 require "statsd/instrument/matchers" if defined?(RSpec)
 require "statsd/instrument/railtie" if defined?(Rails::Railtie)
 require "statsd/instrument/strict" if ENV["STATSD_STRICT_MODE"]

@@ -0,0 +1,142 @@
+# frozen_string_literal: true
+
+require "test_helper"
+
+class AggregatorTest < Minitest::Test
+  def setup
+    @sink = StatsD::Instrument::CaptureSink.new(parent: StatsD::Instrument::NullSink.new)
+    @subject = StatsD::Instrument::Aggregator.new(
+      @sink, StatsD::Instrument::DatagramBuilder, nil, [], flush_interval: 0.1
+    )
+  end
+
+  def teardown
+    @sink.clear
+  end
+
+  def test_increment_simple
+    @subject.increment("foo", 1, tags: { foo: "bar" })
+    @subject.increment("foo", 1, tags: { foo: "bar" })
+    @subject.flush
+
+    datagram = @sink.datagrams.first
+    assert_equal("foo", datagram.name)
+    assert_equal(2, datagram.value)
+    assert_equal(1.0, datagram.sample_rate)
+    assert_equal(["foo:bar"], datagram.tags)
+  end
+
+  def test_distribution_simple
+    @subject.aggregate_timing("foo", 1, tags: { foo: "bar" })
+    @subject.aggregate_timing("foo", 100, tags: { foo: "bar" })
+    @subject.flush
+
+    datagram = @sink.datagrams.first
+    assert_equal("foo", datagram.name)
+    assert_equal(2, datagram.value.size)
+    assert_equal([1.0, 100.0], datagram.value)
+  end
+
+  def test_mixed_type_timings
+    @subject.aggregate_timing("foo_ms", 1, tags: { foo: "bar" }, type: :ms)
+    @subject.aggregate_timing("foo_ms", 100, tags: { foo: "bar" }, type: :ms)
+
+    @subject.aggregate_timing("foo_d", 100, tags: { foo: "bar" }, type: :d)
+    @subject.aggregate_timing("foo_d", 120, tags: { foo: "bar" }, type: :d)
+
+    @subject.flush
+
+    assert_equal(2, @sink.datagrams.size)
+    assert_equal(1, @sink.datagrams.filter { |d| d.name == "foo_ms" }.size)
+    assert_equal(1, @sink.datagrams.filter { |d| d.name == "foo_d" }.size)
+    assert_equal("ms", @sink.datagrams.find { |d| d.name == "foo_ms" }.type.to_s)
+    assert_equal("d", @sink.datagrams.find { |d| d.name == "foo_d" }.type.to_s)
+  end
+
+  def test_gauge_simple
+    @subject.gauge("foo", 1, tags: { foo: "bar" })
+    @subject.gauge("foo", 100, tags: { foo: "bar" })
+    @subject.flush
+
+    datagram = @sink.datagrams.first
+    assert_equal("foo", datagram.name)
+    assert_equal(100, datagram.value)
+    assert_equal(:g, datagram.type)
+  end
+
+  def test_increment_with_tags_in_different_orders
+    @subject.increment("foo", 1, tags: ["tag1:val1", "tag2:val2"])
+    @subject.increment("foo", 1, tags: ["tag2:val2", "tag1:val1"])
+    @subject.flush
+
+    assert_equal(2, @sink.datagrams.first.value)
+  end
+
+  def test_increment_with_different_tag_values
+    @subject.increment("foo", 1, tags: ["tag1:val1", "tag2:val2"])
+    @subject.increment("foo", 1, tags: { tag1: "val1", tag2: "val2" })
+
+    @subject.increment("bar")
+    @subject.flush
+
+    assert_equal(2, @sink.datagrams.first.value)
+    assert_equal(2, @sink.datagrams.size)
+    assert_equal(["tag1:val1", "tag2:val2"], @sink.datagrams.first.tags)
+  end
+
+  def test_increment_with_different_metric_names
+    @subject.increment("foo", 1, tags: ["tag1:val1", "tag2:val2"])
+    @subject.increment("bar", 1, tags: ["tag1:val1", "tag2:val2"])
+    @subject.flush
+
+    assert_equal(1, @sink.datagrams.find { |d| d.name == "foo" }.value)
+    assert_equal(1, @sink.datagrams.find { |d| d.name == "bar" }.value)
+  end
+
+  def test_increment_with_different_values
+    @subject.increment("foo", 1, tags: ["tag1:val1", "tag2:val2"])
+    @subject.increment("foo", 2, tags: ["tag1:val1", "tag2:val2"])
+    @subject.flush
+
+    assert_equal(3, @sink.datagrams.first.value)
+  end
+
+  def test_send_mixed_types_will_pass_through
+    @subject.increment("test_counter", 1, tags: ["tag1:val1", "tag2:val2"])
+    @subject.aggregate_timing("test_counter", 100, tags: ["tag1:val1", "tag2:val2"])
+
+    @subject.gauge("test_gauge", 100, tags: ["tag1:val1", "tag2:val2"])
+    @subject.increment("test_gauge", 1, tags: ["tag1:val1", "tag2:val2"])
+
+    @subject.aggregate_timing("test_timing", 100, tags: ["tag1:val1", "tag2:val2"])
+    @subject.gauge("test_timing", 100, tags: ["tag1:val1", "tag2:val2"])
+    @subject.flush
+
+    assert_equal(6, @sink.datagrams.size)
+
+    assert_equal(2, @sink.datagrams.filter { |d| d.name == "test_counter" }.size)
+    assert_equal(2, @sink.datagrams.filter { |d| d.name == "test_gauge" }.size)
+    assert_equal(2, @sink.datagrams.filter { |d| d.name == "test_timing" }.size)
+
+    assert_equal(:d, @sink.datagrams.find { |d| d.name == "test_timing" }.type)
+    assert_equal(:g, @sink.datagrams.find { |d| d.name == "test_gauge" }.type)
+    assert_equal(:c, @sink.datagrams.find { |d| d.name == "test_counter" }.type)
+  end
+
+  def test_with_prefix
+    aggregator = StatsD::Instrument::Aggregator.new(@sink, StatsD::Instrument::DatagramBuilder, "MyApp", [])
+
+    aggregator.increment("foo", 1, tags: ["tag1:val1", "tag2:val2"])
+    aggregator.increment("foo", 1, tags: ["tag1:val1", "tag2:val2"])
+
+    aggregator.increment("foo", 1, tags: ["tag1:val1", "tag2:val2"], no_prefix: true)
+    aggregator.flush
+
+    assert_equal(2, @sink.datagrams.size)
+    assert_equal("MyApp.foo", @sink.datagrams.first.name)
+    assert_equal(2, @sink.datagrams.first.value)
+
+    assert_equal("foo", @sink.datagrams.last.name)
+    assert_equal(1, @sink.datagrams.last.value)
+  end
+end
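
The tests above pin down the aggregation semantics: counters with the same name and (order-normalized) tags are summed into one datagram, gauges keep the last value written, and timings accumulate every sample so they can be flushed as a single packed datagram. A condensed sketch of the same behaviour, with hypothetical metric names and constructor arguments mirroring the test setup:

    sink = StatsD::Instrument::CaptureSink.new(parent: StatsD::Instrument::NullSink.new)
    aggregator = StatsD::Instrument::Aggregator.new(
      sink, StatsD::Instrument::DatagramBuilder, nil, [], flush_interval: 5.0
    )
    aggregator.increment("requests")   # +1
    aggregator.increment("requests")   # +1, folded into the same datagram
    aggregator.gauge("queue_depth", 3)
    aggregator.gauge("queue_depth", 7) # last write wins
    aggregator.flush
    sink.datagrams.map { |d| [d.name, d.value] }
    # => [["requests", 2], ["queue_depth", 7]] (ordering not guaranteed)
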
data/test/client_test.rb CHANGED
@@ -24,7 +24,7 @@ class ClientTest < Minitest::Test
     assert_equal(["shard:1", "env:production"], client.default_tags)
     assert_equal(StatsD::Instrument::StatsDDatagramBuilder, client.datagram_builder_class)
 
-    assert_kind_of(StatsD::Instrument::BatchedUDPSink, client.sink)
+    assert_kind_of(StatsD::Instrument::BatchedSink, client.sink)
     assert_equal("1.2.3.4", client.sink.host)
     assert_equal(8125, client.sink.port)
   end
@@ -63,6 +63,41 @@
     assert_kind_of(StatsD::Instrument::NullSink, client.sink)
   end
 
+  def test_client_from_env_with_aggregation
+    env = StatsD::Instrument::Environment.new(
+      "STATSD_SAMPLE_RATE" => "0.1",
+      "STATSD_PREFIX" => "foo",
+      "STATSD_DEFAULT_TAGS" => "shard:1,env:production",
+      "STATSD_IMPLEMENTATION" => "statsd",
+      "STATSD_ENABLE_AGGREGATION" => "true",
+      "STATSD_BUFFER_CAPACITY" => "0",
+    )
+    client = StatsD::Instrument::Client.from_env(
+      env,
+      prefix: "bar",
+      implementation: "dogstatsd",
+      sink: StatsD::Instrument::CaptureSink.new(parent: StatsD::Instrument::NullSink.new),
+    )
+
+    assert_equal(0.1, client.default_sample_rate)
+    assert_equal("bar", client.prefix)
+    assert_equal(["shard:1", "env:production"], client.default_tags)
+    assert_equal(StatsD::Instrument::DogStatsDDatagramBuilder, client.datagram_builder_class)
+
+    client.increment("foo", 1, sample_rate: 0.5, tags: { foo: "bar" })
+    client.increment("foo", 1, sample_rate: 0.5, tags: { foo: "bar" })
+
+    client.measure("block_duration_example") { 1 + 1 }
+    client.force_flush
+
+    datagram = client.sink.datagrams.first
+    assert_equal("bar.foo", datagram.name)
+    assert_equal(2, datagram.value)
+
+    datagram = client.sink.datagrams.find { |d| d.name == "bar.block_duration_example" }
+    assert_equal(true, !datagram.nil?)
+  end
+
   def test_capture
     inner_datagrams = nil
 
@@ -121,4 +121,9 @@ class DatagramBuilderTest < Minitest::Test
     datagram = datagram_builder.c("bar", 1, nil, nil)
     assert_equal("bar:1|c|#foo", datagram)
   end
+
+  def test_timing_with_multiple_values
+    dt = @datagram_builder.timing_value_packed("test_distribution", "d", [0, 3, 6, 9, 12, 15, 18, 21, 24, 27], 1, nil)
+    assert_equal("test_distribution:0:3:6:9:12:15:18:21:24:27|d", dt)
+  end
 end
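
The timing_value_packed helper asserted here emits the DogStatsD-style multi-value form: every buffered sample joined with ":" ahead of the type suffix, which is how the aggregator ships a whole distribution in one datagram. The wire format reduces to:

    values = [0, 3, 6, 9]
    "test_distribution:#{values.join(":")}|d" # => "test_distribution:0:3:6:9|d"
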
@@ -6,7 +6,7 @@ class DispatcherStatsTest < Minitest::Test
   include StatsD::Instrument::Assertions
 
   def test_maybe_flush
-    stats = StatsD::Instrument::BatchedUDPSink::DispatcherStats.new(0)
+    stats = StatsD::Instrument::BatchedSink::DispatcherStats.new(0, "udp")
     stats.increment_synchronous_sends
     stats.increment_batched_sends(1, 1, 1)
 
@@ -25,13 +25,13 @@ class DispatcherStatsTest < Minitest::Test
     assert_equal(0, stats.instance_variable_get(:@avg_batched_packet_size))
     assert_equal(0, stats.instance_variable_get(:@avg_batch_length))
 
-    stats = StatsD::Instrument::BatchedUDPSink::DispatcherStats.new(1)
+    stats = StatsD::Instrument::BatchedSink::DispatcherStats.new(1, :udp)
     stats.increment_batched_sends(1, 1, 1)
     assert_no_statsd_calls { stats.maybe_flush! }
   end
 
   def test_calculations_are_correct
-    stats = StatsD::Instrument::BatchedUDPSink::DispatcherStats.new(0)
+    stats = StatsD::Instrument::BatchedSink::DispatcherStats.new(0, :udp)
 
     5.times { stats.increment_synchronous_sends }
     assert_equal(5, stats.instance_variable_get(:@synchronous_sends))

@@ -48,12 +48,12 @@ class EnvironmentTest < Minitest::Test
 
   def test_client_from_env_uses_batched_udp_sink_in_staging_environment
     env = StatsD::Instrument::Environment.new("STATSD_USE_NEW_CLIENT" => "1", "STATSD_ENV" => "staging")
-    assert_kind_of(StatsD::Instrument::BatchedUDPSink, env.client.sink)
+    assert_kind_of(StatsD::Instrument::BatchedSink, env.client.sink)
   end
 
   def test_client_from_env_uses_batched_udp_sink_in_production_environment
     env = StatsD::Instrument::Environment.new("STATSD_USE_NEW_CLIENT" => "1", "STATSD_ENV" => "production")
-    assert_kind_of(StatsD::Instrument::BatchedUDPSink, env.client.sink)
+    assert_kind_of(StatsD::Instrument::BatchedSink, env.client.sink)
   end
 
   def test_client_from_env_uses_regular_udp_sink_when_flush_interval_is_0
@@ -65,7 +65,7 @@ class EnvironmentTest < Minitest::Test
       "STATSD_ENV" => "staging",
       "STATSD_FLUSH_INTERVAL" => "0.0",
     )
-    assert_kind_of(StatsD::Instrument::UDPSink, env.client.sink)
+    assert_kind_of(StatsD::Instrument::Sink, env.client.sink)
   end
 
   def test_client_from_env_uses_regular_udp_sink_when_buffer_capacity_is_0
@@ -74,6 +74,6 @@ class EnvironmentTest < Minitest::Test
       "STATSD_ENV" => "staging",
       "STATSD_BUFFER_CAPACITY" => "0",
     )
-    assert_kind_of(StatsD::Instrument::UDPSink, env.client.sink)
+    assert_kind_of(StatsD::Instrument::Sink, env.client.sink)
   end
 end

@@ -26,4 +26,55 @@ class IntegrationTest < Minitest::Test
     StatsD.increment("counter")
     assert_equal("counter:1|c", @server.recvfrom(100).first)
   end
+
+  def test_live_local_udp_socket_with_aggregation_flush
+    client = StatsD::Instrument::Environment.new(
+      "STATSD_ADDR" => "#{@server.addr[2]}:#{@server.addr[1]}",
+      "STATSD_IMPLEMENTATION" => "dogstatsd",
+      "STATSD_ENV" => "production",
+      "STATSD_ENABLE_AGGREGATION" => "true",
+      "STATSD_AGGREGATION_INTERVAL" => "5.0",
+    ).client
+
+    10.times do |i|
+      client.increment("counter", 2)
+      client.distribution("test_distribution", 3 * i)
+      client.gauge("test_gauge", 3 * i)
+    end
+
+    client.force_flush
+
+    packets = []
+    while IO.select([@server], nil, nil, 0.1)
+      packets << @server.recvfrom(200).first
+    end
+    packets = packets.map { |packet| packet.split("\n") }.flatten
+
+    assert_equal("counter:20|c", packets.find { |packet| packet.start_with?("counter:") })
+    assert_equal(
+      "test_distribution:0:3:6:9:12:15:18:21:24:27|d",
+      packets.find { |packet| packet.start_with?("test_distribution:") },
+    )
+    assert_equal("test_gauge:27|g", packets.find { |packet| packet.start_with?("test_gauge:") })
+  end
+
+  def test_live_local_udp_socket_with_aggregation_periodic_flush
+    client = StatsD::Instrument::Environment.new(
+      "STATSD_ADDR" => "#{@server.addr[2]}:#{@server.addr[1]}",
+      "STATSD_IMPLEMENTATION" => "dogstatsd",
+      "STATSD_ENV" => "production",
+      "STATSD_ENABLE_AGGREGATION" => "true",
+      "STATSD_AGGREGATION_INTERVAL" => "0.1",
+    ).client
+
+    10.times do
+      client.increment("counter", 2)
+    end
+
+    before_flush = Time.now
+    sleep(0.2)
+
+    assert_equal("counter:20|c", @server.recvfrom(100).first)
+    assert_operator(Time.now - before_flush, :<, 0.3, "Flush and ingest should have happened within 0.4s")
+  end
 end

data/test/test_helper.rb CHANGED
@@ -6,8 +6,13 @@ end
 
 ENV["ENV"] = "test"
 
+unless ENV.key?("CI")
+  require "minitest/pride"
+end
 require "minitest/autorun"
-require "minitest/pride"
+unless ENV.key?("CI")
+  require "minitest/pride"
+end
 require "mocha/minitest"
 require "statsd-instrument"