fluent-plugin-datadog-log 0.1.0.rc18 → 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -1,131 +0,0 @@
1
- require 'openssl'
2
-
3
- require 'semantic_logger'
4
-
5
- SemanticLogger.default_level = :warn
6
- SemanticLogger.add_appender(io: STDOUT, formatter: :json)
7
-
8
- require 'net/tcp_client'
9
- require 'socket'
10
- require 'time'
11
-
12
- module Datadog
13
- module Log
14
# Marker appended to any message that had to be cut down to size.
TRUNCATED_MSG = '...TRUNCATED...'

TRUNCATED_LEN = TRUNCATED_MSG.size

# MaxMessageLen is the maximum length for any message we send to the intake
# see https://github.com/DataDog/datadog-log-agent/blob/2394da8c79a6cadbcd1e98d6c89c437becec2732/pkg/config/constants.go#L9-L10
DD_MAX_MESSAGE_LEN = 1 * 1000 * 1000

MAX_MESSAGE_LEN = DD_MAX_MESSAGE_LEN - TRUNCATED_LEN

# Returns msg unchanged when it fits the intake limit; otherwise cuts it
# to MAX_MESSAGE_LEN characters and appends TRUNCATED_MSG so the result
# is exactly DD_MAX_MESSAGE_LEN long.
# NOTE(review): length is measured in characters (String#size), not bytes —
# confirm the intake limit is not byte-based for multibyte input.
def truncate_message(msg)
  return msg if msg.size <= DD_MAX_MESSAGE_LEN
  msg.slice(0, MAX_MESSAGE_LEN) + TRUNCATED_MSG
end
31
-
32
- # Given a list of tags, build_tags_payload generates the bytes array
33
- # that will be inserted into messages
34
- # @see https://github.com/DataDog/datadog-log-agent/blob/2394da8c79a6cadbcd1e98d6c89c437becec2732/pkg/config/integration_config.go#L180
35
# Given a list of tags, build_tags_payload generates the bytes array
# that will be inserted into messages
# @see https://github.com/DataDog/datadog-log-agent/blob/2394da8c79a6cadbcd1e98d6c89c437becec2732/pkg/config/integration_config.go#L180
def build_tags_payload(config_tags:, source:, source_category:)
  segments = []

  segments << "[dd ddsource=\"#{source}\"]" unless source.nil? || source == ''

  unless source_category.nil? || source_category == ''
    segments << "[dd ddsourcecategory=\"#{source_category}\"]"
  end

  unless config_tags.nil? || config_tags == ''
    config_tags = config_tags.join(',') if config_tags.is_a? ::Array
    segments << "[dd ddtags=\"#{config_tags}\"]"
  end

  segments.join
end
51
-
52
- # https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/processor/processor.go#L65
53
# Builds the syslog-style header that precedes the message body.
# https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/processor/processor.go#L65
def build_extra_content(timestamp:, hostname:, service:, tags_payload:)
  format('<46>0 %s %s %s - - %s', timestamp, hostname, service, tags_payload)
end
56
-
57
# Returns "api_key/logset" when a non-empty logset is given, otherwise
# just the api_key.
def build_api_key_str(api_key:, logset:)
  return api_key if logset.nil? || logset == ''
  "#{api_key}/#{logset}"
end
64
-
65
- # build_payload returns a processed payload from a raw message
66
- # @param [String] api_key_str
67
- # @param [String] extra_content
68
- # @param [String] msg
69
# create_payload returns a processed payload from a raw message
# @param [String] api_key_str
# @param [String] extra_content
# @param [String] msg
def create_payload(api_key_str:, msg:, extra_content:)
  [api_key_str, extra_content, msg].join(' ') + "\n"
end
72
-
73
# Thin TCP client for the Datadog log intake. Opens a (by default
# TLS-verified) Net::TCPClient connection at construction time and
# formats/sends one payload per call to #send_payload.
class Client
  include ::Datadog::Log

  # @param log_dd_url [String] intake hostname
  # @param log_dd_port [Integer] intake TCP port
  # @param api_key [String] Datadog API key, prepended to every payload
  # @param hostname [String] hostname reported in the syslog header
  # @param skip_ssl_validation [Boolean] when true, disables certificate
  #   verification (VERIFY_NONE) — insecure; intended for testing only
  def initialize(log_dd_url: 'intake.logs.datadoghq.com', log_dd_port: 10516, api_key:, hostname:, skip_ssl_validation: false)
    @log_dd_url = log_dd_url
    @log_dd_port = log_dd_port
    @api_key = api_key
    @hostname = hostname
    @skip_ssl_validation = skip_ssl_validation

    init_api_client
  end

  # Formats a single log message (truncation, tags, syslog header) and
  # writes it to the intake, retrying once on connection failure via
  # Net::TCPClient#retry_on_connection_failure.
  # @return [String] the raw payload string that was written
  def send_payload(logset: 'main', msg:, datetime: nil, service:, source:, source_category:, tags:)
    datetime = DateTime.now if datetime.nil?

    # new_offset(0) is required. otherwise datadog will silently throws away the log..
    timestamp_str = datetime.new_offset(0).rfc3339(6)
    payload = create_payload(
      api_key_str: build_api_key_str(api_key: @api_key, logset: logset),
      msg: truncate_message(msg),
      extra_content: build_extra_content(
        timestamp: timestamp_str,
        hostname: @hostname,
        service: service,
        tags_payload: build_tags_payload(
          config_tags: tags,
          source: source,
          source_category: source_category
        )
      )
    )
    @conn.retry_on_connection_failure do
      @conn.write(payload)
    end
    payload
  end

  # Closes the underlying connection if one was opened.
  def shutdown
    @conn.close unless @conn.nil?
  end

  class << self
    # Convenience constructor reading the API key from DD_LOG_API_KEY
    # and using the local hostname.
    def from_env
      new(api_key: ENV['DD_LOG_API_KEY'], hostname: Socket.gethostname)
    end
  end

  private

  # Establishes the TCP(/TLS) connection used by #send_payload.
  def init_api_client
    ssl = true
    ssl = { verify_mode: OpenSSL::SSL::VERIFY_NONE } if @skip_ssl_validation
    server = "#{@log_dd_url}:#{@log_dd_port}"
    @conn = Net::TCPClient.new(server: server, ssl: ssl)
  end
end
130
- end
131
- end
@@ -1,584 +0,0 @@
1
- # Copyright 2017 Yusuke KUOKA All rights reserved.
2
- #
3
- # Licensed under the Apache License, Version 2.0 (the "License");
4
- # you may not use this file except in compliance with the License.
5
- # You may obtain a copy of the License at
6
- #
7
- # http://www.apache.org/licenses/LICENSE-2.0
8
- #
9
- # Unless required by applicable law or agreed to in writing, software
10
- # distributed under the License is distributed on an "AS IS" BASIS,
11
- # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
- # See the License for the specific language governing permissions and
13
- # limitations under the License.
14
- require 'erb'
15
- require 'json'
16
- require 'open-uri'
17
- require 'socket'
18
- require 'time'
19
- require 'yaml'
20
- require 'fluent/plugin/output'
21
- require 'datadog/log'
22
-
23
- require_relative 'monitoring'
24
-
25
module Fluent::Plugin
  # fluentd output plugin for the Datadog Log Intake API
  class DatadogOutput < ::Fluent::Plugin::Output
    Fluent::Plugin.register_output('datadog_log', self)

    helpers :compat_parameters, :inject

    include ::Datadog::Log

    DEFAULT_BUFFER_TYPE = 'memory'

    PLUGIN_NAME = 'Fluentd Datadog plugin'
    PLUGIN_VERSION = '0.1.0'

    # Address of the metadata service.
    METADATA_SERVICE_ADDR = '169.254.169.254'

    # Disable this warning to conform to fluentd config_param conventions.
    # rubocop:disable Style/HashSyntax

    # see https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/logagent/logsagent.go#L26-L30
    # see https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/config/config.go#L52-L56
    config_param :log_dd_url, :string, default: 'intake.logs.datadoghq.com'
    config_param :log_dd_port, :integer, default: 10516
    # NOTE(review): no value type is declared here (cf. :bool used below
    # for other flags) — confirm fluentd coerces "true"/"false" strings
    # the way callers expect.
    config_param :skip_ssl_validation, default: false
    config_param :api_key, :string, default: ''
    config_param :logset, :string, default: 'main'

    # e.g. ['env:prod', 'app:myapp']
    # see https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/logagent/etc/conf.d/integration.yaml.example
    config_param :tags, :array, default: [], value_type: :string
    config_param :service, :string, default: '-'
    # e.g. 'nginx'
    config_param :source, :string, default: ''
    config_param :source_category, :string, default: ''

    config_section :buffer do
      config_set_default :@type, DEFAULT_BUFFER_TYPE
    end

    # e.g. 'http_access'
    # config_param :source_category, :string, default: ''

    # Specify project/instance metadata.
    #
    # project_id, zone, and vm_id are required to have valid values, which
    # can be obtained from the metadata service or set explicitly.
    # Otherwise, the plugin will fail to initialize.
    #
    # Note that while 'project id' properly refers to the alphanumeric name
    # of the project, the logging service will also accept the project number,
    # so either one is acceptable in this context.
    #
    # Whether to attempt to obtain metadata from the local metadata service.
    # It is safe to specify 'true' even on platforms with no metadata service.
    config_param :use_metadata_service, :bool, :default => true
    # These parameters override any values obtained from the metadata service.
    config_param :project_id, :string, :default => nil
    config_param :zone, :string, :default => nil
    config_param :vm_id, :string, :default => nil
    config_param :vm_name, :string, :default => nil

    # TODO: Correlate log messages to corresponding Datadog APM spans
    # config_param :trace_key, :string, :default => DEFAULT_TRACE_KEY

    # Whether to try to detect if the record is a text log entry with JSON
    # content that needs to be parsed.
    config_param :detect_json, :bool, :default => false

    # Whether to reject log entries with invalid tags. If this option is set to
    # false, tags will be made valid by converting any non-string tag to a
    # string, and sanitizing any non-utf8 or other invalid characters.
    config_param :require_valid_tags, :bool, :default => false

    # Whether to allow non-UTF-8 characters in user logs. If set to true, any
    # non-UTF-8 character would be replaced by the string specified by
    # 'non_utf8_replacement_string'. If set to false, any non-UTF-8 character
    # would trigger the plugin to error out.
    config_param :coerce_to_utf8, :bool, :default => true

    # If 'coerce_to_utf8' is set to true, any non-UTF-8 character would be
    # replaced by the string specified here.
    config_param :non_utf8_replacement_string, :string, :default => ' '

    # Whether to collect metrics about the plugin usage. The mechanism for
    # collecting and exposing metrics is controlled by the monitoring_type
    # parameter.
    config_param :enable_monitoring, :bool, :default => false
    config_param :monitoring_type, :string, :default => 'prometheus'

    # rubocop:enable Style/HashSyntax

    # Exposed for tests and for other plugins that need the detected values.
    attr_reader :zone
    attr_reader :vm_id
119
-
120
# Plugin construction: defers all real work to #configure/#start and
# only captures the global fluentd logger.
def initialize
  super
  # use the global logger
  @log = $log # rubocop:disable Style/GlobalVars
end
125
-
126
# Validates configuration, resolves the API key, optionally registers
# monitoring counters, detects the runtime platform, and builds the
# default tag set applied to every record.
#
# @param conf [Fluent::Config::Element] parsed plugin configuration
# @raise [Fluent::ConfigError] when no API key can be resolved, or when
#   required metadata (zone/vm_id/vm_name) cannot be determined
def configure(conf)
  compat_parameters_convert(conf, :buffer, :inject)
  super

  # Fall back to the DD_API_KEY environment variable when the api_key
  # parameter was left at its '' default. .to_s makes the unset-env case
  # (nil) collapse into the same empty-string check.
  if @api_key.empty?
    @api_key = ENV['DD_API_KEY'].to_s
    if @api_key.empty?
      fail Fluent::ConfigError, 'Unable to obtain api_key from DD_API_KEY'
    end
  end

  # If monitoring is enabled, register metrics in the default registry
  # and store metric objects for future use.
  if @enable_monitoring
    registry = Monitoring::MonitoringRegistryFactory.create @monitoring_type
    @successful_requests_count = registry.counter(
      :datadog_successful_requests_count,
      'A number of successful requests to the Datadog Log Intake API')
    @failed_requests_count = registry.counter(
      :datadog_failed_requests_count,
      'A number of failed requests to the Datadog Log Intake API,'\
      ' broken down by the error code')
    @ingested_entries_count = registry.counter(
      :datadog_ingested_entries_count,
      'A number of log entries ingested by Datadog Log Intake')
    # Fixed copy-paste from the Stackdriver plugin: these are Datadog
    # plugin metrics, so the descriptions should say so.
    @dropped_entries_count = registry.counter(
      :datadog_dropped_entries_count,
      'A number of log entries dropped by the Datadog output plugin')
    @retried_entries_count = registry.counter(
      :datadog_retried_entries_count,
      'The number of log entries that failed to be ingested by the'\
      ' Datadog output plugin due to a transient error and were'\
      ' retried')
  end

  @platform = detect_platform

  # Set required variables: @project_id, @vm_id, @vm_name and @zone.
  set_required_metadata_variables

  @default_tags = build_default_tags

  # The resource and labels are now set up; ensure they can't be modified
  # without first duping them.
  @default_tags.freeze

  # Log an informational message containing the Logs viewer URL
  @log.info 'Logs viewer address: https://example.com/logs/'
end
176
-
177
# Opens the intake connection and resets per-run state flags.
def start
  super
  init_api_client
  # Set once the first payload is delivered, to emit a one-time info log.
  @successful_call = false
  @timenanos_warning = false
end
183
-
184
# Tears the plugin down, closing the intake connection if one exists.
# Guarded: @conn is only assigned in #start (via init_api_client), so a
# shutdown before a successful start previously raised NoMethodError.
def shutdown
  super
  @conn.shutdown unless @conn.nil?
end
188
-
189
# Serializes one event into the buffer chunk as a [tag, time, record]
# msgpack tuple, after applying the inject helper's additions.
def format(tag, time, record)
  record = inject_values_to_record(tag, time, record)
  [tag, time, record].to_msgpack
end

# Tells fluentd that #format emits msgpack, so chunks can be read back
# with msgpack_each in #write.
def formatted_to_msgpack_binary?
  true
end

# This output keeps no cross-worker state, so multiple workers are fine.
def multi_workers_ready?
  true
end
201
-
202
# Buffered-output entry point: walks every valid [tag, time, record]
# event in the chunk, optionally unwraps embedded JSON, derives Datadog
# tags from any Kubernetes metadata on the record, and sends one payload
# per record through the shared client connection (@conn).
def write(chunk)
  each_valid_record(chunk) do |_tag, time, record|
    if @detect_json
      # Save the timestamp and severity if available, then clear it out to
      # allow for determining whether we should parse the log or message
      # field.
      timestamp = record.delete('time')
      severity = record.delete('severity')

      # If the log is json, we want to export it as a structured log
      # unless there is additional metadata that would be lost.
      record_json = nil
      if record.length == 1
        %w(log message msg).each do |field|
          if record.key?(field)
            record_json = parse_json_or_nil(record[field])
          end
        end
      end
      record = record_json unless record_json.nil?
      # Restore timestamp and severity if necessary. Note that we don't
      # want to override these keys in the JSON we've just parsed.
      record['time'] ||= timestamp if timestamp
      record['severity'] ||= severity if severity
    end

    # TODO: Correlate Datadog APM spans with log messages
    # fq_trace_id = record.delete(@trace_key)
    # entry.trace = fq_trace_id if fq_trace_id

    begin
      # Pick the message body; when several candidate fields are present,
      # the later name in this list wins (msg > message > log).
      msg = nil
      %w(log message msg).each do |field|
        msg = record[field] if record.key?(field)
      end

      tags = []

      kube = record['kubernetes'] || {}

      # Map fluentd kubernetes-metadata fields to Datadog tag keys.
      mappings = {
        'pod_name' => 'pod_name',
        'container_name' => 'container_name',
        'namespace_name' => 'kube_namespace'
      }

      mappings.each do |json_key, tag_key|
        tags << "#{tag_key}:#{kube[json_key]}" if kube.key? json_key
      end

      kube_labels = kube['labels']
      unless kube_labels.nil?
        kube_labels.each do |k, v|
          # Sanitize label keys: ',' and '.' become '_', '/' becomes '-'.
          k2 = k.dup
          k2.gsub!(/[\,\.]/, '_')
          k2.gsub!(%r{/}, '-')
          tags << "kube_#{k2}:#{v}"
        end
      end

      @log.debug 'Dumping kubernetes metadata', metadata: kube

      # Derive a kube_<kind>:<name> tag from the created-by annotation,
      # when present and parseable.
      annotations = kube['annotations']
      unless annotations.nil?
        created_by_str = annotations['kubernetes_io/created-by']
        unless created_by_str.nil?
          created_by = JSON.parse(created_by_str)
          ref = created_by['reference'] unless created_by.nil?
          kind = ref['kind'] unless ref.nil?
          name = ref['name'] unless ref.nil?
          kind = kind.downcase unless kind.nil?
          tags << "kube_#{kind}:#{name}" if !kind.nil? && !name.nil?
        end
      end

      # TODO: Include K8S tags like
      # - kube_daemon_set=$daemonset_name
      # - kube_deployment=$deployment_name
      # - kube_replica_set=$replicaset_name

      tags.concat(@default_tags)

      # Kubernetes-derived values win over the configured defaults.
      unless kube_labels.nil?
        service = kube_labels['app'] ||
                  kube_labels['k8s-app']
      end
      source = kube['pod_name']
      source_category = kube['container_name']

      service = @service if service.nil?
      source = @source if source.nil?
      source_category = @source_category if source_category.nil?

      datetime = Time.at(Fluent::EventTime.new(time).to_r).utc.to_datetime

      payload =
        @conn.send_payload(
          logset: @logset,
          msg: msg,
          datetime: datetime,
          service: service,
          source: source,
          source_category: source_category,
          tags: tags
        )

      entries_count = 1
      @log.debug 'Sent payload to Datadog.', payload: payload
      increment_successful_requests_count
      increment_ingested_entries_count(entries_count)

      # Let the user explicitly know when the first call succeeded, to aid
      # with verification and troubleshooting.
      unless @successful_call
        @successful_call = true
        @log.info 'Successfully sent to Datadog.'
      end

    rescue => error
      increment_failed_requests_count
      # entries_count is nil when the failure happened before the payload
      # was sent (e.g. while building tags); such records are dropped.
      if entries_count.nil?
        increment_dropped_entries_count(1)
        @log.error 'Not retrying a log message later',
                   error: error.to_s, error_class: error.class
      else
        # Send failed after counting: re-raise so fluentd retries the chunk.
        increment_retried_entries_count(entries_count)
        @log.debug "Retrying #{entries_count} log message(s) later.",
                   error: error.to_s
        raise error
      end
    end
  end
end
337
-
338
- private
339
-
340
# Builds the Datadog log intake client used by #write.
# NOTE(review): hostname is set to @vm_id (instance id), not @vm_name —
# presumably intentional so logs are tagged by instance; confirm.
def init_api_client
  @conn = ::Datadog::Log::Client.new(
    log_dd_url: @log_dd_url,
    log_dd_port: @log_dd_port,
    api_key: @api_key,
    hostname: @vm_id,
    skip_ssl_validation: @skip_ssl_validation
  )
end
349
-
350
# Attempts to parse input as a JSON object. Returns the parsed Hash when
# the first non-whitespace character is '{' and the whole string is valid
# JSON; returns nil for nil input, non-JSON text, or a parse failure.
def parse_json_or_nil(input)
  return nil if input.nil?

  input.each_codepoint do |cp|
    case cp
    when 0x7B
      # left curly bracket (U+007B): looks like a JSON object — try it.
      begin
        return JSON.parse(input)
      rescue JSON::ParserError
        return nil
      end
    when 0x09, 0x0A, 0x0D, 0x20
      # JSON whitespace (tab, LF, CR, space): keep scanning for '{'.
      next
    else
      # Any other leading character means this is not a JSON object.
      break
    end
  end
  nil
end
372
-
373
# "enum" of Platform values returned by #detect_platform.
module Platform
  OTHER = 0 # Other/unknown platform
  GCE = 1 # Google Compute Engine
  EC2 = 2 # Amazon EC2
end
379
-
380
# Determine what platform we are running on by consulting the metadata
# service (unless the user has explicitly disabled using that).
# Distinguishes GCE (metadata-flavor header) from EC2 (server header);
# anything else, including an unreachable service, is Platform::OTHER.
def detect_platform
  unless @use_metadata_service
    @log.info 'use_metadata_service is false; not detecting platform'
    return Platform::OTHER
  end

  begin
    # NOTE(review): Kernel#open on a URL relies on open-uri and carries
    # no explicit timeout — confirm the default is acceptable at startup.
    open('http://' + METADATA_SERVICE_ADDR) do |f|
      if f.meta['metadata-flavor'] == 'Google'
        @log.info 'Detected GCE platform'
        return Platform::GCE
      end
      if f.meta['server'] == 'EC2ws'
        @log.info 'Detected EC2 platform'
        return Platform::EC2
      end
    end
  rescue StandardError => e
    @log.error 'Failed to access metadata service: ', error: e
  end

  @log.info 'Unable to determine platform'
  Platform::OTHER
end
406
-
407
# Reads a single value from the GCE metadata service.
# @param metadata_path [String] path below /computeMetadata/v1/
# @raise [RuntimeError] when called on a non-GCE platform
def fetch_gce_metadata(metadata_path)
  fail "Called fetch_gce_metadata with platform=#{@platform}" unless
    @platform == Platform::GCE
  # See https://cloud.google.com/compute/docs/metadata
  open('http://' + METADATA_SERVICE_ADDR + '/computeMetadata/v1/' +
       metadata_path, 'Metadata-Flavor' => 'Google', &:read)
end
414
-
415
# EC2 Metadata server returns everything in one call. Store it after the
# first fetch to avoid making multiple calls.
# @return [Hash] the parsed instance-identity document
# @raise [RuntimeError] when called on a non-EC2 platform
def ec2_metadata
  fail "Called ec2_metadata with platform=#{@platform}" unless
    @platform == Platform::EC2
  unless @ec2_metadata
    # See http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html
    open('http://' + METADATA_SERVICE_ADDR +
         '/latest/dynamic/instance-identity/document') do |f|
      contents = f.read
      @ec2_metadata = JSON.parse(contents)
    end
  end

  @ec2_metadata
end
431
-
432
# Set required variables like @vm_id, @vm_name and @zone, then fail
# configuration if any of them could not be determined.
# @raise [Fluent::ConfigError] listing the parameters still missing
def set_required_metadata_variables
  set_vm_id
  set_vm_name
  set_zone

  # All metadata parameters must now be set.
  missing = []
  missing << 'zone' unless @zone
  missing << 'vm_id' unless @vm_id
  missing << 'vm_name' unless @vm_name
  return if missing.empty?
  fail Fluent::ConfigError, 'Unable to obtain metadata parameters: ' +
                            missing.join(' ')
end
447
-
448
# 1. Return the value if it is explicitly set in the config already.
# 2. If not, try to retrieve it by calling metadata servers directly
#    (EC2 only; other platforms keep the configured value, if any).
# Errors are logged and swallowed so the missing-value check in
# set_required_metadata_variables produces the user-facing failure.
def set_vm_id
  @vm_id ||= ec2_metadata['instanceId'] if @platform == Platform::EC2
rescue StandardError => e
  @log.error 'Failed to obtain vm_id: ', error: e
end

# 1. Return the value if it is explicitly set in the config already.
# 2. If not, fall back to the local hostname.
def set_vm_name
  @vm_name ||= Socket.gethostname
rescue StandardError => e
  @log.error 'Failed to obtain vm name: ', error: e
end

# 1. Return the value if it is explicitly set in the config already.
# 2. If not, derive it from the EC2 availability zone ('aws:<az>').
def set_zone
  @zone ||= 'aws:' + ec2_metadata['availabilityZone'] if
    @platform == Platform::EC2 && ec2_metadata.key?('availabilityZone')
rescue StandardError => e
  @log.error 'Failed to obtain location: ', error: e
end
472
-
473
# Builds the default tag list (host, zone, aws_account_id plus the
# user-configured @tags) applied to every record.
#
# Fixed: previously ec2_metadata was consulted unconditionally, but it
# raises on any non-EC2 platform, which made #configure crash on GCE and
# other hosts. Only touch the EC2 metadata when we detected EC2.
def build_default_tags
  aws_account_id = nil
  if @platform == Platform::EC2 && ec2_metadata.key?('accountId')
    aws_account_id = ec2_metadata['accountId']
  end
  # e.g. host:i-09fbfed2672d2c6bf
  %W(host:#{@vm_id} zone:#{@zone} aws_account_id:#{aws_account_id})
    .concat @tags
end
482
-
483
# Filter out invalid non-Hash entries and entries with invalid tags,
# yielding each surviving [tag, time, record] event to the block.
def each_valid_record(chunk)
  chunk.msgpack_each do |event|
    record = event.last
    unless record.is_a?(Hash)
      @log.warn 'Dropping log entries with malformed record: ' \
                "'#{record.inspect}'. " \
                'A log record should be in JSON format.'
      next
    end
    # Fixed: the tag is the first element of the event tuple
    # ([tag, time, record] — see #format). The previous code took
    # record.first, i.e. the first key/value pair of the record Hash,
    # so the configured tag was never actually validated.
    tag = event.first
    sanitized_tag = sanitize_tag(tag)
    if sanitized_tag.nil?
      @log.warn "Dropping log entries with invalid tag: '#{tag.inspect}'." \
                ' A tag should be a string with utf8 characters.'
      next
    end
    yield event
  end
end
503
-
504
# Given a tag, returns the corresponding valid tag if possible, or nil if
# the tag should be rejected. If 'require_valid_tags' is false, non-string
# tags are converted to strings, and invalid characters are sanitized;
# otherwise such tags are rejected.
def sanitize_tag(tag)
  if @require_valid_tags
    return nil unless tag.is_a?(String)
    return nil if tag == '' || convert_to_utf8(tag) != tag
  end
  sanitized = convert_to_utf8(tag.to_s)
  sanitized == '' ? '_' : sanitized
end
517
-
518
# Encode as UTF-8. If 'coerce_to_utf8' is set to true in the config, any
# non-UTF-8 character would be replaced by the string specified by
# 'non_utf8_replacement_string'. If 'coerce_to_utf8' is set to false, any
# non-UTF-8 character would trigger the plugin to error out.
# NOTE(review): when input is already tagged UTF-8, String#encode to the
# same encoding may pass invalid bytes through unreplaced — confirm.
def convert_to_utf8(input)
  if @coerce_to_utf8
    return input.encode(
      'utf-8',
      invalid: :replace,
      undef: :replace,
      replace: @non_utf8_replacement_string)
  end

  begin
    input.encode('utf-8')
  rescue EncodingError
    @log.error 'Encountered encoding issues potentially due to non ' \
               'UTF-8 characters. To allow non-UTF-8 characters and ' \
               'replace them with spaces, please set "coerce_to_utf8" ' \
               'to true.'
    raise
  end
end
541
-
542
# Coerces value to an Array, raising JSON::ParserError (carrying the
# offending class name) when conversion is impossible.
def ensure_array(value)
  converted = Array.try_convert(value)
  fail JSON::ParserError, "#{value.class}" if converted.nil?
  converted
end

# Coerces value to a Hash, raising JSON::ParserError (carrying the
# offending class name) when conversion is impossible.
def ensure_hash(value)
  converted = Hash.try_convert(value)
  fail JSON::ParserError, "#{value.class}" if converted.nil?
  converted
end
549
-
550
# Increment the metric for the number of successful requests.
# All of these are no-ops when monitoring is disabled (counter is nil).
def increment_successful_requests_count
  counter = @successful_requests_count
  counter.increment if counter
end

# Increment the metric for the number of failed requests, labeled by
# the provided status code.
def increment_failed_requests_count
  counter = @failed_requests_count
  counter.increment if counter
end

# Increment the metric for the number of log entries, successfully
# ingested by the Datadog Log Intake API.
def increment_ingested_entries_count(count)
  counter = @ingested_entries_count
  counter.increment({}, count) if counter
end

# Increment the metric for the number of log entries that were dropped
# and not ingested by the Datadog Log Intake API.
def increment_dropped_entries_count(count)
  counter = @dropped_entries_count
  counter.increment({}, count) if counter
end

# Increment the metric for the number of log entries that failed
# transiently and were handed back to fluentd for retry.
def increment_retried_entries_count(count)
  counter = @retried_entries_count
  counter.increment({}, count) if counter
end
583
- end
584
- end