statsd-instrument 3.9.5 → 3.9.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +5 -0
- data/lib/statsd/instrument/aggregator.rb +1 -1
- data/lib/statsd/instrument/client.rb +35 -20
- data/lib/statsd/instrument/version.rb +1 -1
- data/test/aggregator_test.rb +3 -2
- data/test/client_test.rb +7 -2
- data/test/integration_test.rb +26 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 62a2601a6b425e15a8f3b616fcb182d48548179d95bc5b9bbdef74f5cdabcc1d
+  data.tar.gz: 64a4fd724bcee0e6fe8cac4001439c6a9e0b4b9e5234cd5b654b3df4045171cf
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d74d19d5e29895763d423dd0daf67bedfeeb3e05ebff891f87eda90d4482648f5cd8bb3b163c000f2fe70473ece32f9f4febf36530bcfd16ab2d9d699cf1d49d
+  data.tar.gz: 18d287d1d24792d9e63d7d2f2db3bee74fd929d7e4fd5ea1dccb50447a7278de7cfe8f088184064298809e70ef82b3d719dc27c5d6dc07edbc01b5747dff7246
data/CHANGELOG.md
CHANGED
@@ -6,6 +6,11 @@ section below.
 
 ## Unreleased changes
 
+## Version 3.9.6
+
+- [#388](https://github.com/Shopify/statsd-instrument/pull/388) - Properly fixing the bug when using aggregation and sending sampled
+  histograms, now the client will respect the sampling rate when sending the metrics and pass it down to the aggregator.
+
 ## Version 3.9.5
 
 - [#387](https://github.com/Shopify/statsd-instrument/pull/387) - Fixing bug when using aggregation and sending sampled
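The entry for #388 is easiest to see from the caller's side: when aggregation is enabled, the sample rate supplied to `distribution` (or the client's `default_sample_rate`) is now carried through to the datagram the aggregator eventually flushes. A minimal sketch of that behaviour, with illustrative metric names and rates and the sink configuration omitted for brevity (the constructor arguments mirror the ones used in the tests further down):

```ruby
require "statsd-instrument"

# Aggregation on, 10% client-side sampling (values are illustrative).
client = StatsD::Instrument::Client.new(
  default_sample_rate: 0.1,
  enable_aggregation: true,
)

client.distribution("checkout.duration", 12.0)
client.distribution("checkout.duration", 15.0)
client.force_flush

# When calls pass the client-side sampling check, the flushed datagram now
# carries the rate, along the lines of:
#   "checkout.duration:12.0:15.0|d|@0.1"
# On 3.9.5 the same aggregated datagram was flushed without the |@0.1 suffix.
```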
data/lib/statsd/instrument/aggregator.rb
CHANGED
@@ -138,7 +138,7 @@ module StatsD
     def aggregate_timing(name, value, tags: [], no_prefix: false, type: DISTRIBUTION, sample_rate: CONST_SAMPLE_RATE)
       unless thread_healthcheck
         @sink << datagram_builder(no_prefix: no_prefix).timing_value_packed(
-          name, type.to_s, [value],
+          name, type.to_s, [value], sample_rate, tags
        )
         return
       end
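For reference, the DogStatsD "packed" timing datagram that the aggregator flushes has the shape `name:value1:value2|type`, optionally followed by `|@rate` and `|#tags`; the assertion added to aggregator_test.rb below checks exactly this shape. A rough, illustrative re-implementation of that formatting (not the gem's own `timing_value_packed`, whose internals are not part of this diff):

```ruby
# Illustrative only: builds a packed timing datagram in the shape the
# aggregator_test.rb assertion below expects ("timing.sampled:60.0:80.0|d|@0.01").
def packed_timing_datagram(name, type, values, sample_rate, tags)
  datagram = "#{name}:#{values.join(":")}|#{type}"
  datagram += "|@#{sample_rate}" if sample_rate && sample_rate < 1.0
  datagram += "|##{tags.join(",")}" unless tags.nil? || tags.empty?
  datagram
end

packed_timing_datagram("timing.sampled", "d", [60.0, 80.0], 0.01, [])
# => "timing.sampled:60.0:80.0|d|@0.01"
```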
data/lib/statsd/instrument/client.rb
CHANGED
@@ -318,25 +318,27 @@ module StatsD
     # @param tags (see #increment)
     # @return [void]
     def distribution(name, value = nil, sample_rate: nil, tags: nil, no_prefix: false, &block)
+      if block_given?
+        return latency(name, sample_rate: sample_rate, tags: tags, metric_type: :d, no_prefix: no_prefix, &block)
+      end
+
+      # For all timing metrics, we have to use the sampling logic.
+      # Not doing so would impact performance and CPU usage.
+      # See Datadog's documentation for more details: https://github.com/DataDog/datadog-go/blob/20af2dbfabbbe6bd0347780cd57ed931f903f223/statsd/aggregator.go#L281-L283
       sample_rate ||= @default_sample_rate
       if sample_rate && !sample?(sample_rate)
-        # For all timing metrics, we have to use the sampling logic.
-        # Not doing so would impact performance and CPU usage.
-        # See Datadog's documentation for more details: https://github.com/DataDog/datadog-go/blob/20af2dbfabbbe6bd0347780cd57ed931f903f223/statsd/aggregator.go#L281-L283
-
-        if block_given?
-          return yield
-        end
-
         return StatsD::Instrument::VOID
       end
 
-      if block_given?
-        return latency(name, sample_rate: sample_rate, tags: tags, metric_type: :d, no_prefix: no_prefix, &block)
-      end
-
       if @enable_aggregation
-        @aggregator.aggregate_timing(
+        @aggregator.aggregate_timing(
+          name,
+          value,
+          tags: tags,
+          no_prefix: no_prefix,
+          type: :d,
+          sample_rate: sample_rate,
+        )
         return StatsD::Instrument::VOID
       end
 
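The restructured `distribution` above makes the order explicit: block calls are handed to `latency` first, the sampling decision is made once, and anything that survives it is either aggregated (now with the sample rate attached) or emitted directly. The updated client_test.rb further down pins the aggregated case to a `"metric:60:60|d|@0.5"` datagram; a condensed, stand-alone version of that scenario, using a small recording sink instead of the mocha mock (the sink class here is illustrative, not part of the gem):

```ruby
require "statsd-instrument"

# Illustrative stand-in for the mocha mock used in client_test.rb:
# reports every call as "sampled" and records the datagrams it receives.
class RecordingSink
  attr_reader :datagrams

  def initialize
    @datagrams = []
  end

  def sample?(_sample_rate)
    true
  end

  def <<(datagram)
    @datagrams << datagram
    self
  end

  def flush(*_args, **_kwargs)
  end
end

sink = RecordingSink.new
client = StatsD::Instrument::Client.new(
  sink: sink,
  default_sample_rate: 0.5,
  enable_aggregation: true,
)

5.times { client.distribution("metric", 60) }
client.force_flush

# All five calls were "sampled" here, so the single flushed datagram should
# pack five values and carry the 0.5 rate, along the lines of:
#   "metric:60:60:60:60:60|d|@0.5"
puts sink.datagrams
```

The second client.rb hunk below continues in the same file, inside the `ensure` block of `latency`.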
@@ -392,13 +394,26 @@ module StatsD
     ensure
       stop = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)
 
-
-
-
-
-
-
-
+      # For all timing metrics, we have to use the sampling logic.
+      # Not doing so would impact performance and CPU usage.
+      # See Datadog's documentation for more details:
+      # https://github.com/DataDog/datadog-go/blob/20af2dbfabbbe6bd0347780cd57ed931f903f223/statsd/aggregator.go#L281-L283
+      sample_rate ||= @default_sample_rate
+      if sample_rate.nil? || sample?(sample_rate)
+
+        metric_type ||= datagram_builder(no_prefix: no_prefix).latency_metric_type
+        latency_in_ms = stop - start
+
+        if @enable_aggregation
+          @aggregator.aggregate_timing(
+            name,
+            latency_in_ms,
+            tags: tags,
+            no_prefix: no_prefix,
+            type: metric_type,
+            sample_rate: sample_rate,
+          )
+        else
          emit(datagram_builder(no_prefix: no_prefix).send(metric_type, name, latency_in_ms, sample_rate, tags))
        end
      end
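This second client.rb hunk applies the same treatment inside the `ensure` block of `latency`, which is the path the block forms of `distribution` and `measure` go through: the measured duration is sampled first and, when aggregation is enabled, handed to `aggregate_timing` with the effective sample rate. A short usage sketch, reusing the `client` from the previous snippet (`perform_checkout` and `run_query` are placeholders):

```ruby
# Block form: the elapsed time is measured around the block and then follows
# the same sample-then-aggregate path shown above.
client.distribution("checkout.duration", sample_rate: 0.1) do
  perform_checkout # placeholder for the work being timed
end

# measure falls back to the datagram builder's default latency metric type
# and the client's default_sample_rate when no explicit rate is given.
client.measure("db.query_duration") do
  run_query # placeholder
end
```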
data/test/aggregator_test.rb
CHANGED
@@ -1,7 +1,6 @@
 # frozen_string_literal: true
 
 require "test_helper"
-require "ostruct"
 
 class AggregatorTest < Minitest::Test
   class CaptureLogger
@@ -70,6 +69,7 @@ class AggregatorTest < Minitest::Test
     sampled_datagram = @sink.datagrams.find { |d| d.name == "timing.sampled" }
     assert_equal([60.0, 80.0], sampled_datagram.value)
     assert_equal(0.01, sampled_datagram.sample_rate)
+    assert_equal("timing.sampled:60.0:80.0|d|@0.01", sampled_datagram.source)
 
     unsampled_datagram = @sink.datagrams.find { |d| d.name == "timing.unsampled" }
     assert_equal(60.0, unsampled_datagram.value)
@@ -208,7 +208,7 @@ class AggregatorTest < Minitest::Test
 
     # Additional metrics should also go through synchronously
     @subject.increment("foo", 1, tags: { foo: "bar" })
-    @subject.aggregate_timing("bar", 200, tags: { foo: "bar" })
+    @subject.aggregate_timing("bar", 200, tags: { foo: "bar" }, sample_rate: 0.5)
 
     # Verify new metrics were also sent immediately
     assert_equal(5, @sink.datagrams.size)
@@ -220,6 +220,7 @@ class AggregatorTest < Minitest::Test
     timing_datagram = @sink.datagrams.select { |d| d.name == "bar" }.last
     assert_equal([200.0], [timing_datagram.value])
     assert_equal(["foo:bar"], timing_datagram.tags)
+    assert_equal(0.5, timing_datagram.sample_rate)
 
     # undo the stubbing
     @subject.unstub(:thread_healthcheck)
data/test/client_test.rb
CHANGED
@@ -90,7 +90,7 @@ class ClientTest < Minitest::Test
     client.measure("block_duration_example") { 1 + 1 }
     client.force_flush
 
-    datagram = client.sink.datagrams.
+    datagram = client.sink.datagrams.find { |d| d.name == "bar.foo" }
     assert_equal("bar.foo", datagram.name)
     assert_equal(2, datagram.value)
 
@@ -249,12 +249,17 @@ class ClientTest < Minitest::Test
     mock_sink = mock("sink")
     mock_sink.stubs(:sample?).returns(false, true, false, false, true)
     # Since we are aggregating, we only expect a single datagram.
-    mock_sink.expects(:<<).with("metric:60:60|d").once
+    mock_sink.expects(:<<).with("metric:60:60|d|@0.5").once
     mock_sink.expects(:flush).once
 
     client = StatsD::Instrument::Client.new(sink: mock_sink, default_sample_rate: 0.5, enable_aggregation: true)
     5.times { client.distribution("metric", 60) }
     client.force_flush
+
+    # undo mock
+    mock_sink.unstub(:sample?)
+    mock_sink.unstub(:<<)
+    mock_sink.unstub(:flush)
   end
 
   def test_clone_with_prefix_option
data/test/integration_test.rb
CHANGED
@@ -77,4 +77,30 @@ class IntegrationTest < Minitest::Test
     assert_equal("counter:20|c", @server.recvfrom(100).first)
     assert_operator(Time.now - before_flush, :<, 0.3, "Flush and ingest should have happened within 0.4s")
   end
+
+  def test_live_local_udp_socket_with_aggregation_sampled_scenario
+    client = StatsD::Instrument::Environment.new(
+      "STATSD_ADDR" => "#{@server.addr[2]}:#{@server.addr[1]}",
+      "STATSD_IMPLEMENTATION" => "dogstatsd",
+      "STATSD_ENV" => "production",
+      "STATSD_ENABLE_AGGREGATION" => "true",
+      "STATSD_AGGREGATION_INTERVAL" => "0.1",
+    ).client
+
+    100.times do
+      client.increment("counter", 2)
+      client.distribution("test_distribution", 3, sample_rate: 0.1)
+    end
+
+    sleep(0.2)
+
+    packets = []
+    while IO.select([@server], nil, nil, 0.1)
+      packets << @server.recvfrom(300).first
+    end
+    packets = packets.map { |packet| packet.split("\n") }.flatten
+
+    assert_match(/counter:\d+|c/, packets.find { |packet| packet.start_with?("counter:") })
+    assert_match(/test_distribution:\d+:3|d/, packets.find { |packet| packet.start_with?("test_distribution:") })
+  end
 end
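The new integration test builds its client through `StatsD::Instrument::Environment`, passing the configuration as a hash of environment-style variables; in an application the same settings would normally come from the process environment. A sketch with illustrative values (`STATSD_AGGREGATION_INTERVAL` being the aggregator's flush interval in seconds, judging by the 0.1-second interval and `sleep(0.2)` used in the test):

```ruby
require "statsd-instrument"

# Illustrative address and interval; the test above points STATSD_ADDR at a
# local UDP socket it controls.
client = StatsD::Instrument::Environment.new(
  "STATSD_ADDR" => "127.0.0.1:8125",
  "STATSD_IMPLEMENTATION" => "dogstatsd",
  "STATSD_ENV" => "production",
  "STATSD_ENABLE_AGGREGATION" => "true",
  "STATSD_AGGREGATION_INTERVAL" => "0.1",
).client

client.increment("counter", 2)
client.distribution("test_distribution", 3, sample_rate: 0.1)
```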
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: statsd-instrument
 version: !ruby/object:Gem::Version
-  version: 3.9.5
+  version: 3.9.6
 platform: ruby
 authors:
 - Jesse Storimer
@@ -10,7 +10,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-10-
+date: 2024-10-31 00:00:00.000000000 Z
 dependencies: []
 description: A StatsD client for Ruby apps. Provides metaprogramming methods to inject
   StatsD instrumentation into your code.
|