fluent-plugin-datadog-log 0.1.0.rc1
This diff shows the contents of publicly released package versions as they appear in their respective public registries; it is provided for informational purposes only.
- checksums.yaml +7 -0
- data/Gemfile +3 -0
- data/Gemfile.lock +91 -0
- data/LICENSE +201 -0
- data/README.rdoc +51 -0
- data/Rakefile +43 -0
- data/fluent-plugin-datadog-log.gemspec +31 -0
- data/fluent-plugin-datadog-log.gemspec~ +31 -0
- data/fluent-plugin-datadog.gemspec~ +36 -0
- data/lib/datadog/log.rb +123 -0
- data/lib/fluent/plugin/monitoring.rb +55 -0
- data/lib/fluent/plugin/out_datadog_log.rb +549 -0
- data/pkg/fluent-plugin-datadog-0.1.0.gem +0 -0
- data/pkg/fluent-plugin-datadog-log-0.1.0.gem +0 -0
- data/test/helper.rb +40 -0
- data/test/plugin/base_test.rb +173 -0
- data/test/plugin/constants.rb +188 -0
- data/test/plugin/test_out_datadog_log.rb +206 -0
- metadata +193 -0
data/test/plugin/base_test.rb
@@ -0,0 +1,173 @@
# Copyright 2017 Yusuke KUOKA All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require 'helper'
require 'mocha/test_unit'
require 'webmock/test_unit'
require 'prometheus/client'
require 'fluent/test/driver/output'
require 'fluent/test/helpers'

require_relative 'constants'

# Unit tests for Datadog plugin
module BaseTest
  include Constants
  include Fluent::Test::Helpers

  def setup
    Fluent::Test.setup
    require 'fluent/plugin/out_datadog_log'
    @logs_sent = []
  end

  def create_driver(conf = APPLICATION_DEFAULT_CONFIG)
    Fluent::Test::Driver::Output.new(Fluent::Plugin::DatadogOutput)
      .configure(conf)
  end

  private

  def setup_no_metadata_service_stubs
    # Simulate a machine with no metadata service present
    stub_request(:any, %r{http://169.254.169.254/.*})
      .to_raise(Errno::EHOSTUNREACH)
  end

  def setup_ec2_metadata_stubs
    # Stub the root, used for platform detection.
    stub_request(:get, 'http://169.254.169.254')
      .to_return(status: 200, headers: { 'Server' => 'EC2ws' })

    # Stub the identity document lookup made by the agent.
    stub_request(:get, 'http://169.254.169.254/latest/dynamic/' \
                       'instance-identity/document')
      .to_return(body: EC2_IDENTITY_DOCUMENT, status: 200,
                 headers: { 'Content-Length' => EC2_IDENTITY_DOCUMENT.length })
  end

  def setup_logging_stubs
    yield
  end

  def setup_prometheus
    Prometheus::Client.registry.instance_variable_set('@metrics', {})
  end

  # Provide a stub context that initializes @logs_sent, executes the block and
  # resets WebMock at the end.
  def new_stub_context
    @logs_sent = []
    yield
    WebMock.reset!
  end

  # Container.

  def container_tag_with_container_name(container_name)
    "kubernetes.#{CONTAINER_POD_NAME}_#{CONTAINER_NAMESPACE_NAME}_" \
      "#{container_name}"
  end

  def container_log_entry_with_metadata(
      log, container_name = CONTAINER_CONTAINER_NAME)
    {
      log: log,
      stream: CONTAINER_STREAM,
      time: CONTAINER_TIMESTAMP,
      kubernetes: {
        namespace_id: CONTAINER_NAMESPACE_ID,
        namespace_name: CONTAINER_NAMESPACE_NAME,
        pod_id: CONTAINER_POD_ID,
        pod_name: CONTAINER_POD_NAME,
        container_name: container_name,
        labels: {
          CONTAINER_LABEL_KEY => CONTAINER_LABEL_VALUE
        }
      }
    }
  end

  def container_log_entry(log, stream = CONTAINER_STREAM)
    {
      log: log,
      stream: stream,
      time: CONTAINER_TIMESTAMP
    }
  end

  # Docker Container.

  def docker_container_stdout_stderr_log_entry(
      log, stream = DOCKER_CONTAINER_STREAM_STDOUT)
    severity = if stream == DOCKER_CONTAINER_STREAM_STDOUT
                 'INFO'
               else
                 'ERROR'
               end
    {
      log: log,
      source: stream,
      severity: severity
    }
  end

  def docker_container_application_log_entry(log)
    {
      log: log,
      time: DOCKER_CONTAINER_TIMESTAMP
    }
  end

  def log_entry(i)
    "test log entry #{i}"
  end

  # This module expects the methods below to be overridden.

  def assert_prometheus_metric_value(metric_name, expected_value, labels = {})
    metric = Prometheus::Client.registry.get(metric_name)
    assert_not_nil(metric)
    assert_equal(expected_value, metric.get(labels))
  end

  # Get the fields of the payload.
  def get_fields(_payload)
    _undefined
  end

  # Get the value of a struct field.
  def get_struct(_field)
    _undefined
  end

  # Get the value of a string field.
  def get_string(_field)
    _undefined
  end

  # Get the value of a number field.
  def get_number(_field)
    _undefined
  end

  # The null value.
  def null_value(_field)
    _undefined
  end

  def _undefined
    fail "Method #{__callee__} is unimplemented and needs to be overridden."
  end
end
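
BaseTest is a mix-in rather than a test case: a concrete Test::Unit class includes it to pick up the driver and stub helpers, then overrides the payload accessors (get_fields, get_struct, get_string, get_number, null_value) that otherwise raise via _undefined. The real consumer shipped in this gem is test_out_datadog_log.rb below; the sketch here only illustrates the pattern, and the class name MyOutputTest is hypothetical.

  require_relative 'base_test'

  # Minimal consumer of BaseTest (illustrative only).
  class MyOutputTest < Test::Unit::TestCase
    include BaseTest

    private

    # Override the abstract accessors so the shared assertions in BaseTest
    # can navigate this plugin's payload format.
    def get_fields(payload)
      payload['fields']
    end

    def get_string(field)
      field['stringValue']
    end
  end
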
data/test/plugin/constants.rb
@@ -0,0 +1,188 @@
# Copyright 2017 Yusuke KUOKA All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

# Constants used by unit tests for Datadog plugin.
module Constants
  # Generic attributes.
  HOSTNAME = Socket.gethostname

  # TODO(qingling128) Separate constants into different submodules.
  # Attributes used for the GCE metadata service.
  ZONE = 'us-central1-b'
  VM_ID = '9876543210'

  # Attributes used for the Metadata Agent resources.
  METADATA_ZONE = 'us-central1-c'
  METADATA_VM_ID = '0123456789'

  # Attributes used for custom (overridden) configs.
  CUSTOM_PROJECT_ID = 'test-custom-project-id'
  CUSTOM_ZONE = 'us-custom-central1-b'
  CUSTOM_VM_ID = 'C9876543210'
  CUSTOM_HOSTNAME = 'custom.hostname.org'

  # Attributes used for the EC2 metadata service.
  EC2_PROJECT_ID = 'test-ec2-project-id'
  EC2_ZONE = 'us-west-2b'
  EC2_PREFIXED_ZONE = 'aws:' + EC2_ZONE
  EC2_VM_ID = 'i-81c16767'
  EC2_ACCOUNT_ID = '123456789012'

  # The formatting here matches the format used on the VM.
  EC2_IDENTITY_DOCUMENT = %({
    "accountId" : "#{EC2_ACCOUNT_ID}",
    "availabilityZone" : "#{EC2_ZONE}",
    "instanceId" : "#{EC2_VM_ID}"
  })

  # Managed VMs specific labels.
  MANAGED_VM_BACKEND_NAME = 'default'
  MANAGED_VM_BACKEND_VERSION = 'guestbook2.0'

  # Docker Container labels.
  DOCKER_CONTAINER_ID = '0d0f03ff8d3c42688692536d1af77a28cd135c0a5c531f25a31'
  DOCKER_CONTAINER_NAME = 'happy_hippo'
  DOCKER_CONTAINER_STREAM_STDOUT = 'stdout'
  DOCKER_CONTAINER_STREAM_STDERR = 'stderr'
  # Timestamp for 1234567890 seconds and 987654321 nanoseconds since epoch.
  DOCKER_CONTAINER_TIMESTAMP = '2009-02-13T23:31:30.987654321Z'
  DOCKER_CONTAINER_SECONDS_EPOCH = 1_234_567_890
  DOCKER_CONTAINER_NANOS = 987_654_321

  # Container Engine / Kubernetes specific labels.
  CONTAINER_CLUSTER_NAME = 'cluster-1'
  CONTAINER_NAMESPACE_ID = '898268c8-4a36-11e5-9d81-42010af0194c'
  CONTAINER_NAMESPACE_NAME = 'kube-system'
  CONTAINER_POD_ID = 'cad3c3c4-4b9c-11e5-9d81-42010af0194c'
  CONTAINER_POD_NAME = 'redis-master-c0l82.foo.bar'
  CONTAINER_CONTAINER_NAME = 'redis'
  CONTAINER_LABEL_KEY = 'component'
  CONTAINER_LABEL_VALUE = 'redis-component'
  CONTAINER_STREAM = 'stdout'
  CONTAINER_SEVERITY = 'INFO'
  # Timestamp for 1234567890 seconds and 987654321 nanoseconds since epoch.
  CONTAINER_TIMESTAMP = '2009-02-13T23:31:30.987654321Z'
  CONTAINER_SECONDS_EPOCH = 1_234_567_890
  CONTAINER_NANOS = 987_654_321

  # Cloud Functions specific labels.
  CLOUDFUNCTIONS_FUNCTION_NAME = '$My_Function.Name-@1'
  CLOUDFUNCTIONS_REGION = 'us-central1'
  CLOUDFUNCTIONS_EXECUTION_ID = '123-0'
  CLOUDFUNCTIONS_CLUSTER_NAME = 'cluster-1'
  CLOUDFUNCTIONS_NAMESPACE_NAME = 'default'
  CLOUDFUNCTIONS_POD_NAME = 'd.dc.myu.uc.functionp.pc.name-a.a1.987-c0l82'
  CLOUDFUNCTIONS_CONTAINER_NAME = 'worker'

  # Dataflow specific labels.
  DATAFLOW_REGION = 'us-central1'
  DATAFLOW_JOB_NAME = 'job_name_1'
  DATAFLOW_JOB_ID = 'job_id_1'
  DATAFLOW_STEP_ID = 'step_1'
  DATAFLOW_TAG = 'dataflow-worker'

  # Dataproc specific labels.
  DATAPROC_CLUSTER_NAME = 'test-cluster'
  DATAPROC_CLUSTER_UUID = '00000000-0000-0000-0000-000000000000'
  DATAPROC_REGION = 'unittest'

  # ML specific labels.
  ML_REGION = 'us-central1'
  ML_JOB_ID = 'job_name_1'
  ML_TASK_NAME = 'task_name_1'
  ML_TRIAL_ID = 'trial_id_1'
  ML_LOG_AREA = 'log_area_1'
  ML_TAG = 'master-replica-0'

  # Parameters used for authentication.
  AUTH_GRANT_TYPE = 'urn:ietf:params:oauth:grant-type:jwt-bearer'
  FAKE_AUTH_TOKEN = 'abc123'

  # Information about test credentials files.
  # path: Path to the credentials file.
  # project_id: ID of the project, which must correspond to the file contents.
  IAM_CREDENTIALS = {
    path: 'test/plugin/data/iam-credentials.json',
    project_id: 'fluent-test-project'
  }
  LEGACY_CREDENTIALS = {
    path: 'test/plugin/data/credentials.json',
    project_id: '847859579879'
  }
  INVALID_CREDENTIALS = {
    path: 'test/plugin/data/invalid_credentials.json',
    project_id: ''
  }

  # Configuration files for various test scenarios.
  APPLICATION_DEFAULT_CONFIG = %(
  )

  DETECT_JSON_CONFIG = %(
    detect_json true
  )

  PARTIAL_SUCCESS_CONFIG = %(
    partial_success true
  )

  REQUIRE_VALID_TAGS_CONFIG = %(
    require_valid_tags true
  )

  NO_METADATA_SERVICE_CONFIG = %(
    use_metadata_service false
  )

  PROMETHEUS_ENABLE_CONFIG = %(
    enable_monitoring true
    monitoring_type prometheus
  )

  CUSTOM_METADATA_CONFIG = %(
    zone #{CUSTOM_ZONE}
    vm_id #{CUSTOM_VM_ID}
    vm_name #{CUSTOM_HOSTNAME}
  )

  # Service configurations for various services.

  CUSTOM_LABELS_MESSAGE = {
    'customKey' => 'value'
  }
  # Tags and their sanitized and encoded version.
  VALID_TAGS = {
    'test' => 'test',
    'germanß' => 'german%C3%9F',
    'chinese中' => 'chinese%E4%B8%AD',
    'specialCharacter/_-.' => 'specialCharacter%2F_-.',
    'abc@&^$*' => 'abc%40%26%5E%24%2A',
    '@&^$*' => '%40%26%5E%24%2A'
  }
  INVALID_TAGS = {
    # Non-string tags.
    123 => '123',
    1.23 => '1.23',
    [1, 2, 3] => '%5B1%2C%202%2C%203%5D',
    { key: 'value' } => '%7B%22key%22%3D%3E%22value%22%7D',
    # Non-utf8 string tags.
    "nonutf8#{[0x92].pack('C*')}" => 'nonutf8%20',
    "abc#{[0x92].pack('C*')}" => 'abc%20',
    "#{[0x92].pack('C*')}" => '%20',
    # Empty string tag.
    '' => '_'
  }
  ALL_TAGS = VALID_TAGS.merge(INVALID_TAGS)
end
data/test/plugin/test_out_datadog_log.rb
@@ -0,0 +1,206 @@
# Copyright 2017 Yusuke KUOKA All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require_relative 'base_test'

# Unit tests for Datadog Log plugin
class DatadogLogOutputTest < Test::Unit::TestCase
  include BaseTest

  def test_configure
    new_stub_context do
      setup_ec2_metadata_stubs

      d = create_driver(<<-EOC)
        type datadog_log
        api_key myapikey
        service myservice
        source mysource
      EOC

      assert_equal 'myapikey', d.instance.api_key
      assert_equal 'myservice', d.instance.service
      assert_equal 'mysource', d.instance.source
    end
  end

  def test_write
    new_stub_context do
      setup_ec2_metadata_stubs

      timestamp_str = '2006-01-02T15:04:05.000000+00:00'
      t = DateTime.rfc3339(timestamp_str).to_time
      time = Fluent::EventTime.from_time(t)
      d = create_driver(<<-EOC)
        type datadog_log
        api_key myapikey
        service myservice
        source mysource
        source_category mysourcecategory
        logset mylogset
        log_level debug
      EOC
      conn = StubConn.new
      fluentd_tag = 'mytag'
      Net::TCPClient.stubs(:new)
        .with(server: ':10516', ssl: true)
        .returns(conn)
      d.run(default_tag: fluentd_tag) do
        record = {
          'log' => 'mymsg',
          'kubernetes' => {
            'namespace' => 'myns',
            'pod_name' => 'mypod',
            'container_name' => 'mycontainer',
            'labels' => {
              'k8s-app' => 'myapp'
            }
          }
        }
        d.feed(time, record)
      end

      # fail d.logs.inspect
      assert_equal(1, d.logs.count { |l| l =~ /Sent payload to Datadog/ })
      assert_equal(1, conn.sent.size)
      # rubocop:disable LineLength
      payload = %(myapikey/mylogset <46>0 2006-01-02T15:04:05.000000+00:00 i-81c16767 myservice - - [dd ddsource="mysource"][dd ddsourcecategory="mysourcecategory"][dd ddtags="pod_name=mypod,container_name=mycontainer,kube_k8s-app=myapp,host=i-81c16767,zone=aws:us-west-2b,aws_account_id=123456789012"] mymsg\n)
      # rubocop:enable LineLength
      assert_equal(payload, conn.sent.first)
    end
  end

  def test_prometheus_metrics
    new_stub_context do
      setup_ec2_metadata_stubs
      timestamp_str = '2006-01-02T15:04:05.000000+00:00'
      t = DateTime.rfc3339(timestamp_str).to_time
      time = Fluent::EventTime.from_time(t)
      [
        # Single successful request.
        [false, 0, 1, 1, [1, 0, 1, 0, 0]],
        # Several successful requests.
        [false, 0, 2, 1, [2, 0, 2, 0, 0]]
      ].each do |_should_fail, _code, request_count, entry_count, metric_values|
        setup_prometheus
        (1..request_count).each do
          d = create_driver(<<-EOC)
            type datadog_log
            api_key myapikey
            service myservice
            source mysource
            source_category mysourcecategory
            logset mylogset
            log_level debug
            enable_monitoring true
          EOC
          conn = StubConn.new
          Net::TCPClient.stubs(:new)
            .with(server: ':10516', ssl: true)
            .returns(conn)
          d.run(default_tag: 'mytag') do
            (1..entry_count).each do |i|
              d.feed time, 'message' => log_entry(i.to_s)
            end
          end
        end
        successful_requests_count, failed_requests_count,
          ingested_entries_count, dropped_entries_count,
          retried_entries_count = metric_values
        assert_prometheus_metric_value(:datadog_successful_requests_count,
                                       successful_requests_count)
        assert_prometheus_metric_value(:datadog_failed_requests_count,
                                       failed_requests_count)
        assert_prometheus_metric_value(:datadog_ingested_entries_count,
                                       ingested_entries_count)
        assert_prometheus_metric_value(:datadog_dropped_entries_count,
                                       dropped_entries_count)
        assert_prometheus_metric_value(:datadog_retried_entries_count,
                                       retried_entries_count)
      end
    end
  end

  def test_struct_payload_non_utf8_log
    # d.emit('msg' => log_entry(0),
    #        'normal_key' => "test#{non_utf8_character}non utf8",
    #        "non_utf8#{non_utf8_character}key" => 5000,
    #        'nested_struct' => { "non_utf8#{non_utf8_character}key" => \
    #                             "test#{non_utf8_character}non utf8" },
    #        'null_field' => nil)
  end

  class StubConn
    attr_reader :sent

    def initialize
      @sent = []
    end

    def write(payload)
      @sent << payload
    end

    def close
    end
  end

  private

  # Use the right single quotation mark as the sample non-utf8 character.
  def non_utf8_character
    [0x92].pack('C*')
  end

  # For an optional field with default values, Protobuf omits the field when it
  # is deserialized to json. So we need to add an extra check for gRPC which
  # uses Protobuf.
  #
  # An optional block can be passed in if we need to assert something other than
  # a plain equal. e.g. assert_in_delta.
  def assert_equal_with_default(field, expected_value, default_value, entry)
    if expected_value == default_value
      assert_nil field
    elsif block_given?
      yield
    else
      assert_equal expected_value, field, entry
    end
  end

  # Get the fields of the payload.
  def get_fields(payload)
    payload['fields']
  end

  # Get the value of a struct field.
  def get_struct(field)
    field['structValue']
  end

  # Get the value of a string field.
  def get_string(field)
    field['stringValue']
  end

  # Get the value of a number field.
  def get_number(field)
    field['numberValue']
  end

  # The null value.
  def null_value
    { 'nullValue' => 'NULL_VALUE' }
  end
end
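
Taken together, the driver configurations embedded in these tests exercise the plugin's main options. A minimal Fluentd match block assembled from those fixtures would look roughly like the sketch below; it is inferred from the tests rather than copied from the gem's README, and the match pattern and placeholder API key are illustrative.

  <match kubernetes.**>
    type datadog_log
    api_key YOUR_DATADOG_API_KEY     # 'myapikey' in the tests
    service myservice
    source mysource
    source_category mysourcecategory
    logset mylogset
    log_level debug
    # Optional: export Prometheus counters such as
    # datadog_successful_requests_count (see test_prometheus_metrics).
    enable_monitoring true
  </match>
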