fluent-plugin-datadog-log 0.1.0.rc10 → 0.1.0.rc11

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 8c9df2b279c5e9048bd66cd3a8745b846b04052e
- data.tar.gz: 2b2c3fbd224a2999057149073662157030b0c8e2
+ metadata.gz: 2c4be0d80cdbe47b400bc5c04eee649dd189ca90
+ data.tar.gz: 8e739dc15a66b3022bafd99a13a12bc28d851cdc
  SHA512:
- metadata.gz: bd31ec06397133d8720f8e44c4775f2d0b6bdfe494b79407fddc2589f314b5eecc5d9cbc72c3a3f05f47d93b52349565d2dc98892e480aba6f75819129c7ecb9
- data.tar.gz: f4d69d86c46a88f2bb067dbbcaa42be29c313a8cc39c0e93174078a40958c88ec6c44b9987638651aac23d7e739f44732f5adb9c1e3b57c78694fdb68aca115e
+ metadata.gz: 64f91253df6651a79c451ad2fd70f42f3c6dc73cef39ec8e05b6c99c31e04644577760e8c3bc7ab81e153f4e309bc944408658525a5072da3e0a95aef7ccd2a9
+ data.tar.gz: 1eae9b9df15196726c663d72859ff5db3f8ba45b148470d00c2d4ec0e44443d09c75bf2bd870075b8139ba64df5d2b237eb684e391763d6be2c06cda19e2f08e
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
  PATH
  remote: .
  specs:
- fluent-plugin-datadog-log (0.1.0.rc10)
- fluentd (~> 0.14)
+ fluent-plugin-datadog-log (0.1.0.rc11)
+ fluentd (~> 1.0.0)
  json (~> 1.8)
  net_tcp_client (~> 2.0.1)
  prometheus-client (~> 0.7.1)
@@ -20,7 +20,7 @@ GEM
  cool.io (1.5.3)
  crack (0.4.3)
  safe_yaml (~> 1.0.0)
- fluentd (0.14.24)
+ fluentd (1.0.0)
  cool.io (>= 1.4.5, < 2.0.0)
  http_parser.rb (>= 0.5.1, < 0.7.0)
  msgpack (>= 0.7.0, < 2.0.0)
@@ -37,15 +37,15 @@ GEM
  metaclass (0.0.4)
  mocha (1.3.0)
  metaclass (~> 0.0.1)
- msgpack (1.1.0)
+ msgpack (1.2.0)
  net_tcp_client (2.0.1)
- parser (2.4.0.0)
- ast (~> 2.2)
+ parser (2.4.0.2)
+ ast (~> 2.3)
  power_assert (1.1.1)
  powerpack (0.1.1)
  prometheus-client (0.7.1)
  quantile (~> 0.2.0)
- public_suffix (3.0.0)
+ public_suffix (3.0.1)
  quantile (0.2.0)
  rainbow (2.2.2)
  rake
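
The resolved versions above show the project moving from the Fluentd 0.14 series to Fluentd 1.0.0 (the corresponding gemspec change appears further down). As a minimal sketch, not taken from the gem itself, a host application could pin the matching versions in its own Gemfile and let Bundler resolve them to the versions recorded in this lock file:

  # Gemfile sketch (hypothetical host application), mirroring the gem's
  # '~> 1.0.0' runtime dependency on fluentd.
  source 'https://rubygems.org'

  gem 'fluentd', '~> 1.0.0'
  gem 'fluent-plugin-datadog-log', '0.1.0.rc11'
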
@@ -8,7 +8,7 @@ eos
  gem.homepage = \
  'https://github.com/mumoshu/fluent-plugin-datadog-log'
  gem.license = 'Apache-2.0'
- gem.version = '0.1.0.rc10'
+ gem.version = '0.1.0.rc11'
  gem.authors = ['Yusuke KUOKA']
  gem.email = ['ykuoka@gmail.com']
  gem.required_ruby_version = Gem::Requirement.new('>= 2.0')
@@ -17,7 +17,7 @@ eos
  gem.test_files = gem.files.grep(/^(test)/)
  gem.require_paths = ['lib']

- gem.add_runtime_dependency 'fluentd', '~> 0.14'
+ gem.add_runtime_dependency 'fluentd', '~> 1.0.0'
  # gem.add_runtime_dependency 'datadog-log-api-client', '~> 0.1'
  gem.add_runtime_dependency 'json', '~> 1.8'

@@ -0,0 +1,586 @@
+ # Copyright 2017 Yusuke KUOKA All rights reserved.
+ #
+ # Licensed under the Apache License, Version 2.0 (the "License");
+ # you may not use this file except in compliance with the License.
+ # You may obtain a copy of the License at
+ #
+ # http://www.apache.org/licenses/LICENSE-2.0
+ #
+ # Unless required by applicable law or agreed to in writing, software
+ # distributed under the License is distributed on an "AS IS" BASIS,
+ # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ # See the License for the specific language governing permissions and
+ # limitations under the License.
+ require 'erb'
+ require 'json'
+ require 'open-uri'
+ require 'socket'
+ require 'time'
+ require 'yaml'
+ require 'fluent/plugin/output'
+ require 'datadog/log'
+
+ require_relative 'monitoring'
+
+ module Fluent::Plugin
+ # fluentd output plugin for the Datadog Log Intake API
+ class DatadogOutput < ::Fluent::Plugin::Output
+ Fluent::Plugin.register_output('datadog_log', self)
+
+ helpers :compat_parameters, :inject
+
+ include ::Datadog::Log
+
+ DEFAULT_BUFFER_TYPE = 'memory'
+
+ PLUGIN_NAME = 'Fluentd Datadog plugin'
+ PLUGIN_VERSION = '0.1.0'
+
+ # Address of the metadata service.
+ METADATA_SERVICE_ADDR = '169.254.169.254'
+
+ # Disable this warning to conform to fluentd config_param conventions.
+ # rubocop:disable Style/HashSyntax
+
+ # see https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/logagent/logsagent.go#L26-L30
+ # see https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/config/config.go#L52-L56
+ config_param :log_dd_url, :string, default: 'intake.logs.datadoghq.com'
+ config_param :log_dd_port, :integer, default: 10516
+ config_param :skip_ssl_validation, default: false
+ config_param :api_key, :string, default: ''
+ config_param :logset, :string, default: 'main'
+
+ # e.g. ['env:prod', 'app:myapp']
+ # see https://github.com/DataDog/datadog-log-agent/blob/db13b53dfdd036d43acfb15089a43eb31548f09f/pkg/logagent/etc/conf.d/integration.yaml.example
+ config_param :tags, :array, default: [], value_type: :string
+ config_param :service, :string, default: '-'
+ # e.g. 'nginx'
+ config_param :source, :string, default: ''
+ config_param :source_category, :string, default: ''
+
+ config_section :buffer do
+ config_set_default :@type, DEFAULT_BUFFER_TYPE
+ end
+
+ # e.g. 'http_access'
+ # config_param :source_category, :string, default: ''
+
+ # Specify project/instance metadata.
+ #
+ # project_id, zone, and vm_id are required to have valid values, which
+ # can be obtained from the metadata service or set explicitly.
+ # Otherwise, the plugin will fail to initialize.
+ #
+ # Note that while 'project id' properly refers to the alphanumeric name
+ # of the project, the logging service will also accept the project number,
+ # so either one is acceptable in this context.
+ #
+ # Whether to attempt to obtain metadata from the local metadata service.
+ # It is safe to specify 'true' even on platforms with no metadata service.
+ config_param :use_metadata_service, :bool, :default => true
+ # These parameters override any values obtained from the metadata service.
+ config_param :project_id, :string, :default => nil
+ config_param :zone, :string, :default => nil
+ config_param :vm_id, :string, :default => nil
+ config_param :vm_name, :string, :default => nil
+
+ # TODO: Correlate log messages to corresponding Datadog APM spans
+ # config_param :trace_key, :string, :default => DEFAULT_TRACE_KEY
+
+ # Whether to try to detect if the record is a text log entry with JSON
+ # content that needs to be parsed.
+ config_param :detect_json, :bool, :default => false
+
+ # Whether to reject log entries with invalid tags. If this option is set to
+ # false, tags will be made valid by converting any non-string tag to a
+ # string, and sanitizing any non-utf8 or other invalid characters.
+ config_param :require_valid_tags, :bool, :default => false
+
+ # Whether to allow non-UTF-8 characters in user logs. If set to true, any
+ # non-UTF-8 character would be replaced by the string specified by
+ # 'non_utf8_replacement_string'. If set to false, any non-UTF-8 character
+ # would trigger the plugin to error out.
+ config_param :coerce_to_utf8, :bool, :default => true
+
+ # If 'coerce_to_utf8' is set to true, any non-UTF-8 character would be
+ # replaced by the string specified here.
+ config_param :non_utf8_replacement_string, :string, :default => ' '
+
+ # Whether to collect metrics about the plugin usage. The mechanism for
+ # collecting and exposing metrics is controlled by the monitoring_type
+ # parameter.
+ config_param :enable_monitoring, :bool, :default => false
+ config_param :monitoring_type, :string, :default => 'prometheus'
+
+ # rubocop:enable Style/HashSyntax
+
+ attr_reader :zone
+ attr_reader :vm_id
+
+ def initialize
+ super
+ # use the global logger
+ @log = $log # rubocop:disable Style/GlobalVars
+ end
+
+ def configure(conf)
+ compat_parameters_convert(conf, :buffer, :inject)
+ super
+
+ if @api_key.size == 0
+ @api_key = ENV['DD_API_KEY']
+ if @api_key == '' || @api_key.nil?
+ error_message = 'Unable to obtain api_key from DD_API_KEY'
+ fail Fluent::ConfigError, error_message
+ end
+ end
+
+ # If monitoring is enabled, register metrics in the default registry
+ # and store metric objects for future use.
+ if @enable_monitoring
+ registry = Monitoring::MonitoringRegistryFactory.create @monitoring_type
+ @successful_requests_count = registry.counter(
+ :datadog_successful_requests_count,
+ 'A number of successful requests to the Datadog Log Intake API')
+ @failed_requests_count = registry.counter(
+ :datadog_failed_requests_count,
+ 'A number of failed requests to the Datadog Log Intake API,'\
+ ' broken down by the error code')
+ @ingested_entries_count = registry.counter(
+ :datadog_ingested_entries_count,
+ 'A number of log entries ingested by Datadog Log Intake')
+ @dropped_entries_count = registry.counter(
+ :datadog_dropped_entries_count,
+ 'A number of log entries dropped by the Stackdriver output plugin')
+ @retried_entries_count = registry.counter(
+ :datadog_retried_entries_count,
+ 'The number of log entries that failed to be ingested by the'\
+ ' Stackdriver output plugin due to a transient error and were'\
+ ' retried')
+ end
+
+ @platform = detect_platform
+
+ # Set required variables: @project_id, @vm_id, @vm_name and @zone.
+ set_required_metadata_variables
+
+ @default_tags = build_default_tags
+
+ # The resource and labels are now set up; ensure they can't be modified
+ # without first duping them.
+ @default_tags.freeze
+
+ # Log an informational message containing the Logs viewer URL
+ @log.info 'Logs viewer address: https://example.com/logs/'
+ end
+
+ def start
+ super
+ init_api_client
+ @successful_call = false
+ @timenanos_warning = false
+ end
+
+ def shutdown
+ super
+ @conn.shutdown
+ end
+
+ def format(tag, time, record)
+ record = inject_values_to_record(tag, time, record)
+ [tag, time, record].to_msgpack
+ end
+
+ def formatted_to_msgpack_binary?
+ true
+ end
+
+ def multi_workers_ready?
+ true
+ end
+
+ def write(chunk)
+ each_valid_record(chunk) do |_tag, time, record|
+ if @detect_json
+ # Save the timestamp and severity if available, then clear it out to
+ # allow for determining whether we should parse the log or message
+ # field.
+ timestamp = record.delete('time')
+ severity = record.delete('severity')
+
+ # If the log is json, we want to export it as a structured log
+ # unless there is additional metadata that would be lost.
+ record_json = nil
+ if record.length == 1
+ %w(log message msg).each do |field|
+ if record.key?(field)
+ record_json = parse_json_or_nil(record[field])
+ end
+ end
+ end
+ record = record_json unless record_json.nil?
+ # Restore timestamp and severity if necessary. Note that we don't
+ # want to override these keys in the JSON we've just parsed.
+ record['time'] ||= timestamp if timestamp
+ record['severity'] ||= severity if severity
+ end
+
+ # TODO: Correlate Datadog APM spans with log messages
+ # fq_trace_id = record.delete(@trace_key)
+ # entry.trace = fq_trace_id if fq_trace_id
+
+ begin
+ msg = nil
+ %w(log message msg).each do |field|
+ msg = record[field] if record.key?(field)
+ end
+
+ tags = []
+
+ docker = record['docker'] || {}
+
+ kube = record['kubernetes'] || {}
+
+ mappings = {
+ 'pod_name' => 'pod_name',
+ 'container_name' => 'container_name',
+ 'namespace_name' => 'kube_namespace'
+ }
+
+ mappings.each do |json_key, tag_key|
+ tags << "#{tag_key}:#{kube[json_key]}" if kube.key? json_key
+ end
+
+ kube_labels = kube['labels']
+ unless kube_labels.nil?
+ kube_labels.each do |k, v|
+ k2 = k.dup
+ k2.gsub!(/[\,\.]/, '_')
+ k2.gsub!(%r{/}, '-')
+ tags << "kube_#{k2}:#{v}"
+ end
+ end
+
+ @log.debug 'Dumping kubernetes metadata', metadata: kube
+
+ annotations = kube['annotations']
+ unless annotations.nil?
+ created_by_str = annotations['kubernetes_io/created-by']
+ unless created_by_str.nil?
+ created_by = JSON.parse(created_by_str)
+ ref = created_by['reference'] unless created_by.nil?
+ kind = ref['kind'] unless ref.nil?
+ name = ref['name'] unless ref.nil?
+ kind = kind.downcase unless kind.nil?
+ tags << "kube_#{kind}:#{name}" if !kind.nil? && !name.nil?
+ end
+ end
+
+ # TODO: Include K8S tags like
+ # - kube_daemon_set=$daemonset_name
+ # - kube_deployment=$deployment_name
+ # - kube_replica_set=$replicaset_name
+ # -
+
+ tags.concat(@default_tags)
+
+ unless kube_labels.nil?
+ service = kube_labels['app'] ||
+ kube_labels['k8s-app']
+ end
+ source = kube['pod_name']
+ source_category = kube['container_name']
+
+ service = @service if service.nil?
+ source = @source if source.nil?
+ source_category = @source_category if source_category.nil?
+
+ datetime = Time.at(Fluent::EventTime.new(time).to_r).utc.to_datetime
+
+ payload =
+ @conn.send_payload(
+ logset: @logset,
+ msg: msg,
+ datetime: datetime,
+ service: service,
+ source: source,
+ source_category: source_category,
+ tags: tags
+ )
+
+ entries_count = 1
+ @log.debug 'Sent payload to Datadog.', payload: payload
+ increment_successful_requests_count
+ increment_ingested_entries_count(entries_count)
+
+ # Let the user explicitly know when the first call succeeded, to aid
+ # with verification and troubleshooting.
+ unless @successful_call
+ @successful_call = true
+ @log.info 'Successfully sent to Datadog.'
+ end
+
+ rescue => error
+ increment_failed_requests_count
+ if entries_count.nil?
+ increment_dropped_entries_count(1)
+ @log.error 'Not retrying a log message later',
+ error: error.to_s, error_class: error.class
+ else
+ increment_retried_entries_count(entries_count)
+ # RPC cancelled, so retry via re-raising the error.
+ @log.debug "Retrying #{entries_count} log message(s) later.",
+ error: error.to_s
+ raise error
+ end
+ end
+ end
+ end
+ end
+
+ private
+
+ def init_api_client
+ @conn = ::Datadog::Log::Client.new(
+ log_dd_url: @log_dd_url,
+ log_dd_port: @log_dd_port,
+ api_key: @api_key,
+ hostname: @vm_id,
+ skip_ssl_validation: @skip_ssl_validation
+ )
+ end
+
+ def parse_json_or_nil(input)
+ # Only here to please rubocop...
+ return nil if input.nil?
+
+ input.each_codepoint do |c|
+ if c == 123
+ # left curly bracket (U+007B)
+ begin
+ return JSON.parse(input)
+ rescue JSON::ParserError
+ return nil
+ end
+ else
+ # Break (and return nil) unless the current character is whitespace,
+ # in which case we continue to look for a left curly bracket.
+ # Whitespace as per the JSON spec are: tabulation (U+0009),
+ # line feed (U+000A), carriage return (U+000D), and space (U+0020).
+ break unless c == 9 || c == 10 || c == 13 || c == 32
+ end # case
+ end # do
+ nil
+ end
+
+ # "enum" of Platform values
+ module Platform
+ OTHER = 0 # Other/unknown platform
+ GCE = 1 # Google Compute Engine
+ EC2 = 2 # Amazon EC2
+ end
+
+ # Determine what platform we are running on by consulting the metadata
+ # service (unless the user has explicitly disabled using that).
+ def detect_platform
+ unless @use_metadata_service
+ @log.info 'use_metadata_service is false; not detecting platform'
+ return Platform::OTHER
+ end
+
+ begin
+ open('http://' + METADATA_SERVICE_ADDR) do |f|
+ if f.meta['metadata-flavor'] == 'Google'
+ @log.info 'Detected GCE platform'
+ return Platform::GCE
+ end
+ if f.meta['server'] == 'EC2ws'
+ @log.info 'Detected EC2 platform'
+ return Platform::EC2
+ end
+ end
+ rescue StandardError => e
+ @log.error 'Failed to access metadata service: ', error: e
+ end
+
+ @log.info 'Unable to determine platform'
+ Platform::OTHER
+ end
+
+ def fetch_gce_metadata(metadata_path)
+ fail "Called fetch_gce_metadata with platform=#{@platform}" unless
+ @platform == Platform::GCE
+ # See https://cloud.google.com/compute/docs/metadata
+ open('http://' + METADATA_SERVICE_ADDR + '/computeMetadata/v1/' +
+ metadata_path, 'Metadata-Flavor' => 'Google', &:read)
+ end
+
+ # EC2 Metadata server returns everything in one call. Store it after the
+ # first fetch to avoid making multiple calls.
+ def ec2_metadata
+ fail "Called ec2_metadata with platform=#{@platform}" unless
+ @platform == Platform::EC2
+ unless @ec2_metadata
+ # See http://docs.aws.amazon.com/AWSEC2/latest/UserGuide/ec2-instance-metadata.html
+ open('http://' + METADATA_SERVICE_ADDR +
+ '/latest/dynamic/instance-identity/document') do |f|
+ contents = f.read
+ @ec2_metadata = JSON.parse(contents)
+ end
+ end
+
+ @ec2_metadata
+ end
+
+ # Set required variables like @vm_id, @vm_name and @zone.
+ def set_required_metadata_variables
+ set_vm_id
+ set_vm_name
+ set_zone
+
+ # All metadata parameters must now be set.
+ missing = []
+ missing << 'zone' unless @zone
+ missing << 'vm_id' unless @vm_id
+ missing << 'vm_name' unless @vm_name
+ return if missing.empty?
+ fail Fluent::ConfigError, 'Unable to obtain metadata parameters: ' +
+ missing.join(' ')
+ end
+
+ # 1. Return the value if it is explicitly set in the config already.
+ # 2. If not, try to retrieve it by calling metadata servers directly.
+ def set_vm_id
+ @vm_id ||= ec2_metadata['instanceId'] if @platform == Platform::EC2
+ rescue StandardError => e
+ @log.error 'Failed to obtain vm_id: ', error: e
+ end
+
+ # 1. Return the value if it is explicitly set in the config already.
+ # 2. If not, try to retrieve it locally.
+ def set_vm_name
+ @vm_name ||= Socket.gethostname
+ rescue StandardError => e
+ @log.error 'Failed to obtain vm name: ', error: e
+ end
+
+ # 1. Return the value if it is explicitly set in the config already.
+ # 2. If not, try to retrieve it locally.
+ def set_zone
+ @zone ||= 'aws:' + ec2_metadata['availabilityZone'] if
+ @platform == Platform::EC2 && ec2_metadata.key?('availabilityZone')
+ rescue StandardError => e
+ @log.error 'Failed to obtain location: ', error: e
+ end
+
+ # Determine agent level monitored resource labels based on the resource
+ # type. Each resource type has its own labels that need to be filled in.
+ def build_default_tags
+ aws_account_id = ec2_metadata['accountId'] if
+ ec2_metadata.key?('accountId')
+ # #host:i-09fbfed2672d2c6bf
+ %W(host:#{@vm_id} zone:#{@zone} aws_account_id:#{aws_account_id})
+ .concat @tags
+ end
+
+ # Filter out invalid non-Hash entries.
+ def each_valid_record(chunk)
+ chunk.msgpack_each do |event|
+ record = event.last
+ unless record.is_a?(Hash)
+ @log.warn 'Dropping log entries with malformed record: ' \
+ "'#{record.inspect}'. " \
+ 'A log record should be in JSON format.'
+ next
+ end
+ tag = record.first
+ sanitized_tag = sanitize_tag(tag)
+ if sanitized_tag.nil?
+ @log.warn "Dropping log entries with invalid tag: '#{tag.inspect}'." \
+ ' A tag should be a string with utf8 characters.'
+ next
+ end
+ yield event
+ end
+ end
+
+ # Given a tag, returns the corresponding valid tag if possible, or nil if
+ # the tag should be rejected. If 'require_valid_tags' is false, non-string
+ # tags are converted to strings, and invalid characters are sanitized;
+ # otherwise such tags are rejected.
+ def sanitize_tag(tag)
+ if @require_valid_tags &&
+ (!tag.is_a?(String) || tag == '' || convert_to_utf8(tag) != tag)
+ return nil
+ end
+ tag = convert_to_utf8(tag.to_s)
+ tag = '_' if tag == ''
+ tag
+ end
+
+ # Encode as UTF-8. If 'coerce_to_utf8' is set to true in the config, any
+ # non-UTF-8 character would be replaced by the string specified by
+ # 'non_utf8_replacement_string'. If 'coerce_to_utf8' is set to false, any
+ # non-UTF-8 character would trigger the plugin to error out.
+ def convert_to_utf8(input)
+ if @coerce_to_utf8
+ input.encode(
+ 'utf-8',
+ invalid: :replace,
+ undef: :replace,
+ replace: @non_utf8_replacement_string)
+ else
+ begin
+ input.encode('utf-8')
+ rescue EncodingError
+ @log.error 'Encountered encoding issues potentially due to non ' \
+ 'UTF-8 characters. To allow non-UTF-8 characters and ' \
+ 'replace them with spaces, please set "coerce_to_utf8" ' \
+ 'to true.'
+ raise
+ end
+ end
+ end
+
+ def ensure_array(value)
+ Array.try_convert(value) || (fail JSON::ParserError, "#{value.class}")
+ end
+
+ def ensure_hash(value)
+ Hash.try_convert(value) || (fail JSON::ParserError, "#{value.class}")
+ end
+
+ # Increment the metric for the number of successful requests.
+ def increment_successful_requests_count
+ return unless @successful_requests_count
+ @successful_requests_count.increment
+ end
+
+ # Increment the metric for the number of failed requests, labeled by
+ # the provided status code.
+ def increment_failed_requests_count
+ return unless @failed_requests_count
+ @failed_requests_count.increment
+ end
+
+ # Increment the metric for the number of log entries, successfully
+ # ingested by the Datadog Log Intake API.
+ def increment_ingested_entries_count(count)
+ return unless @ingested_entries_count
+ @ingested_entries_count.increment({}, count)
+ end
+
+ # Increment the metric for the number of log entries that were dropped
+ # and not ingested by the Datadog Log Intake API.
+ def increment_dropped_entries_count(count)
+ return unless @dropped_entries_count
+ @dropped_entries_count.increment({}, count)
+ end
+
+ # Increment the metric for the number of log entries that were dropped
+ # and not ingested by the Datadog Log Intake API.
+ def increment_retried_entries_count(count)
+ return unless @retried_entries_count
+ @retried_entries_count.increment({}, count)
+ end
+ end
+ end
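
The write method in the file above derives Datadog tags from the Kubernetes metadata carried on each record. The following standalone sketch, using a made-up record rather than anything shipped in the gem, walks through that mapping and shows the resulting tag list:

  # Standalone Ruby sketch of the Kubernetes-to-Datadog tag mapping done in
  # DatadogOutput#write above; the record below is a hypothetical example.
  record = {
    'kubernetes' => {
      'pod_name' => 'web-0',
      'container_name' => 'nginx',
      'namespace_name' => 'prod',
      'labels' => { 'app' => 'web', 'release.track/stable' => 'true' }
    }
  }

  kube = record['kubernetes'] || {}
  tags = []

  # Fixed key-to-tag mappings, as in the plugin.
  {
    'pod_name' => 'pod_name',
    'container_name' => 'container_name',
    'namespace_name' => 'kube_namespace'
  }.each do |json_key, tag_key|
    tags << "#{tag_key}:#{kube[json_key]}" if kube.key?(json_key)
  end

  # Pod labels become kube_<label>:<value> tags; ',' and '.' are replaced
  # with '_' and '/' with '-', matching the gsub calls in the plugin.
  (kube['labels'] || {}).each do |k, v|
    tags << "kube_#{k.gsub(/[,.]/, '_').gsub(%r{/}, '-')}:#{v}"
  end

  p tags
  # => ["pod_name:web-0", "container_name:nginx", "kube_namespace:prod",
  #     "kube_app:web", "kube_release_track-stable:true"]
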
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-datadog-log
  version: !ruby/object:Gem::Version
- version: 0.1.0.rc10
+ version: 0.1.0.rc11
  platform: ruby
  authors:
  - Yusuke KUOKA
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-11-30 00:00:00.000000000 Z
+ date: 2017-12-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: fluentd
@@ -16,14 +16,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.14'
+ version: 1.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '0.14'
+ version: 1.0.0
  - !ruby/object:Gem::Dependency
  name: json
  requirement: !ruby/object:Gem::Requirement
@@ -169,9 +169,11 @@ files:
  - fluent-plugin-datadog.gemspec~
  - lib/datadog/log.rb
  - lib/datadog/log.rb~
+ - lib/fluent/plugin/#out_datadog_log.rb#
  - lib/fluent/plugin/monitoring.rb
  - lib/fluent/plugin/out_datadog_log.rb
  - lib/fluent/plugin/out_datadog_log.rb~
+ - pkg/fluent-plugin-datadog-log-0.1.0.rc10.gem
  - test/helper.rb
  - test/plugin/base_test.rb
  - test/plugin/constants.rb