fluent-plugin-newrelic 1.1.4 → 1.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
-   metadata.gz: 8cb4af63cf04a60335e50efdc0861bb8207a8486abf19c119f19622c0d36f523
-   data.tar.gz: c64ccd499c958400b1be3459c7ec9c864515baa7155839f817a6627e5bef14a5
+   metadata.gz: aa7f85405e5ce666734d15281351981ca8d702e75efafc4b70221a5967c04426
+   data.tar.gz: cf1c6e0c8d5bb12f5f3c2af3adf05c414eb79ae80b700c70ab606b5ba3235c1c
  SHA512:
-   metadata.gz: da6b7569f2f5555c902673beff340926993167cb68b8f11f6154c80fe48a82158ff7eb41c86828517ee56f1957417822ad25a8203ef18e9a9c4b4121222f93cd
-   data.tar.gz: e7bbc86ca57b5c20df3e81c7ff61750b0a8e0cad1424ec22ca1cab875de2159fabb8becb768600b7959cab7cd48a4c69a51af4ba3204fc79ae656932dfac4ead
+   metadata.gz: e543ee72fc1808b59c121b85bc14df698c7eaeb8c762276a806f8171d07beccab88fb246f6d05277fb739602137ee77324013cf6aa87467b7528a8b89c345080
+   data.tar.gz: d7bcfaf304976556e14b666a5068d6cb16db89f1188712a5964e0ac25a3e5b808bf53144cbf8160bb71272c654ea35d245d395e393d43ad5bc43eabd49bf4d4b
data/.github/workflows/merge-to-master.yml ADDED
@@ -0,0 +1,40 @@
+ name: New Relic Fluentd Output Plugin - Merge to master
+
+ on:
+   push:
+     branches:
+       - master
+
+ jobs:
+   ci:
+     name: Continuous Delivery pipeline
+     runs-on: ubuntu-18.04
+
+     steps:
+       - name: Checkout code
+         uses: actions/checkout@v2
+
+       - name: Setup Ruby, bundler and install dependencies
+         uses: ruby/setup-ruby@v1
+         with:
+           ruby-version: ruby-2.7.2
+           bundler-cache: true # runs 'bundle install' and caches installed gems automatically
+
+       - name: Run unit tests
+         run: bundle exec rake
+
+       - name: Publish Unit Test Results
+         uses: EnricoMi/publish-unit-test-result-action@v1.5
+         if: always()
+         with:
+           github_token: ${{ secrets.GITHUB_TOKEN }}
+           files: '**/TEST-*.xml'
+
+       - name: Build gem
+         run: gem build newrelic-fluentd-output.gemspec
+
+       - name: Publish fluent-plugin-newrelic to rubygems.org
+         env:
+           GEM_HOST_API_KEY: ${{ secrets.GEM_HOST_API_KEY }}
+         run: |
+           gem push fluent-plugin-newrelic-*.gem
data/.github/workflows/pr.yml ADDED
@@ -0,0 +1,28 @@
+ name: New Relic Fluentd Output Plugin - Pull Request
+
+ on: [pull_request]
+
+ jobs:
+   ci:
+     name: Continuous Integration pipeline
+     runs-on: ubuntu-18.04
+
+     steps:
+       - name: Checkout code
+         uses: actions/checkout@v2
+
+       - name: Setup Ruby, bundler and install dependencies
+         uses: ruby/setup-ruby@v1
+         with:
+           ruby-version: ruby-2.5.8
+           bundler-cache: true # runs 'bundle install' and caches installed gems automatically
+
+       - name: Run unit tests
+         run: bundle exec rake
+
+       - name: Publish Unit Test Results
+         uses: EnricoMi/publish-unit-test-result-action@v1.5
+         if: always()
+         with:
+           github_token: ${{ secrets.GITHUB_TOKEN }}
+           files: '**/TEST-*.xml'
data/.github/workflows/repolinter.yml ADDED
@@ -0,0 +1,31 @@
+ # NOTE: This file should always be named `repolinter.yml` to allow
+ # workflow_dispatch to work properly
+ name: Repolinter Action
+
+ # NOTE: This workflow will ONLY check the default branch!
+ # Currently there is no elegant way to specify the default
+ # branch in the event filtering, so branches are instead
+ # filtered in the "Test Default Branch" step.
+ on: [push, workflow_dispatch]
+
+ jobs:
+   repolint:
+     name: Run Repolinter
+     runs-on: ubuntu-latest
+     steps:
+       - name: Test Default Branch
+         id: default-branch
+         uses: actions/github-script@v2
+         with:
+           script: |
+             const data = await github.repos.get(context.repo)
+             return data.data && data.data.default_branch === context.ref.split('/').slice(-1)[0]
+       - name: Checkout Self
+         if: ${{ steps.default-branch.outputs.result == 'true' }}
+         uses: actions/checkout@v2
+       - name: Run Repolinter
+         if: ${{ steps.default-branch.outputs.result == 'true' }}
+         uses: newrelic/repolinter-action@v1
+         with:
+           config_url: https://raw.githubusercontent.com/newrelic/.github/main/repolinter-rulesets/community-plus.yml
+           output_type: issue
data/.gitignore CHANGED
@@ -5,7 +5,7 @@
  /coverage/
  /doc/
  /pkg/
- /spec/reports/
+ /test/reports/
  /tmp/
  .DS_Store
  .ruby-version
data/DEVELOPER.md CHANGED
@@ -44,34 +44,3 @@ instead of `<match **>`), so that your output plugin does not also pick up thing
  * Make sure things start up OK: `tail -f /var/log/td-agent/td-agent.log`
  * Cause a change that you've configured Fluentd to pick up: (`echo "FluentdTest" >> /usr/local/var/log/test.log`
  * Look in `https://one.newrelic.com/launcher/logger.log-launcher` for your log message ("FluentdTest")
-
- ## Pushing changes to the public repo
-
- After updating the New Relic repo with changes, changes will need to be pushed to the public GitHub repo
- at: https://github.com/newrelic/newrelic-fluentd-output
-
- Make sure you have the public set up as a remote called `public`:
- ```
- git remote add public git@github.com:newrelic/newrelic-fluentd-output.git
- ```
-
- Sync:
- ```
- git checkout --orphan single-commit-for-public-sync && \
- # Remove things we don't want sent to the public-sync repo \
- rm grandcentral.yml DEVELOPER.md && \
- # Create a single commit and force push it, overwriting the remote master \
- git commit -am "Mirrored commit" && \
- git push --force public single-commit-for-public-sync:master && \
- # Clean up \
- git checkout master && \
- git branch -D single-commit-for-public-sync
- ```
-
- # Push changes to RubyGems
- After updating the source code and gem version in `version.rb`, push the changes to RubyGems.
- Note, you must be a gem owner to publish changes on [RubyGems.org](https://rubygems.org/profiles/NR-LOGGING)
-
- * Build the gem: `gem build newrelic-fluentd-output.gemspec`
- * Publish the gem: `gem push fluent-plugin-newrelic-<VERSION>.gem`
-   with the updated version (example: `gem push fluent-plugin-newrelic-0.2.2.gem`)
data/Gemfile CHANGED
@@ -4,5 +4,5 @@ source 'https://rubygems.org'
  gemspec
 
  group :development do
-   gem "rspec"
+   gem "ci_reporter_test_unit"
  end
data/README.md CHANGED
@@ -1,14 +1,22 @@
+ [![Community Project header](https://github.com/newrelic/opensource-website/raw/master/src/images/categories/Community_Project.png)](https://opensource.newrelic.com/oss-category/#community-project)
+
  # fluent-plugin-newrelic
 
  A [Fluentd](https://fluentd.org/) output plugin that sends logs to New Relic
 
  This project is provided AS-IS WITHOUT WARRANTY OR SUPPORT, although you can report issues and contribute to the project here on GitHub.
 
+ ## Examples
+
+ Please see the [examples](examples/) directory for ways to build a Docker image with the New Relic output plugin and other configuration types
+ that could be useful in your environment.
+
  ## Prerequisites
 
  Fluentd >= v1.0
 
  ## Installation
+
  Add the plugin to your fluentd agent:
 
  `fluent-gem install fluent-plugin-newrelic`
@@ -23,22 +31,22 @@ For more info, review [Fluentd's official documentation](https://docs.fluentd.or
 
  ### Required plugin configuration
 
- Exactly one of the following:
+ This plugin must be configured with either a New Relic API Insert key, or a New Relic License key.
+ If both types of keys are specified, the API Insert key will take precedence.
 
- | Property | Description |
- |---|---|
- | api_key | your New Relic API Insert key |
- | license_key | your New Relic License key |
+ To specify an API Insert key, either set the `api_key` property in the configuration, or set the `NEW_RELIC_API_KEY` environment variable. If both are specified, the configuration property will take precedence.
+
+ To specify a license key, either set the `license_key` property in the configuration, or set the `NEW_RELIC_LICENSE_KEY` environment variable. If both are specified, the configuration property will take precedence.
 
  ### Optional plugin configuration
 
  | Property | Description | Default value |
  |---|---|---|
- | base_uri | New Relic ingestion endpoint | 'https://log-api.newrelic.com/log/v1' |
+ | base_uri | New Relic ingestion endpoint | `https://log-api.newrelic.com/log/v1` |
 
  ### EU plugin configuration
 
- If you are running this plugin in the eu set the `base_uri` to `http://log-api.eu.newrelic.com/log/v1`.
+ If you are running this plugin in the eu set the `base_uri` to `https://log-api.eu.newrelic.com/log/v1`.
 
  ### Fields
 
@@ -53,6 +61,7 @@ Add one of the following blocks to your Fluentd config file (with your specific
  #### Using Insights Inserts Key
 
  Example using Insights Insert key:
+
  ```rb
  <match **>
    @type newrelic
@@ -64,7 +73,9 @@ Getting your New Relic Insights Insert key:
  `https://insights.newrelic.com/accounts/<ACCOUNT_ID>/manage/api_keys`
 
  #### Using License Key
+
  Example using License key:
+
  ```rb
  <match **>
    @type newrelic
@@ -75,6 +86,18 @@ Example using License key:
  Getting your New Relic license key:
  `https://rpm.newrelic.com/accounts/<ACCOUNT_ID>`
 
+ **A note about vulnerabilities**
+
+ As noted in our [security policy](../../security/policy), New Relic is committed to the privacy and security of our customers and their data. We believe that providing coordinated disclosure by security researchers and engaging with the security community are important means to achieve our security goals.
+
+ If you believe you have found a security vulnerability in this project or any of New Relic's products or websites, we welcome and greatly appreciate you reporting it to New Relic through [HackerOne](https://hackerone.com/newrelic).
+
+ If you would like to contribute to this project, review [these guidelines](https://opensource.newrelic.com/code-of-conduct/).
+
+ ## License
+ newrelic-fluentd-output is licensed under the [Apache 2.0](http://apache.org/licenses/LICENSE-2.0.txt) License.
+
+
 
  ## Copyright
  * Copyright(c) 2019 - New Relic
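Pulling the pieces of this README change together, a minimal EU-region configuration that relies on the new environment-variable fallback instead of an inline key could look like the following. This is a sketch based on the options documented above (env-var fallback and `base_uri`), not an excerpt from the gem:

```rb
<match **>
  @type newrelic
  # No api_key/license_key here: the plugin falls back to the
  # NEW_RELIC_API_KEY or NEW_RELIC_LICENSE_KEY environment variable
  # when neither property is set in the configuration.
  base_uri https://log-api.eu.newrelic.com/log/v1
</match>
```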
data/Rakefile CHANGED
@@ -1,2 +1,19 @@
- require "bundler/gem_tasks"
+ require "bundler"
+ Bundler::GemHelper.install_tasks
+
+ require "rake/testtask"
+ require 'ci/reporter/rake/test_unit'
+
+ Rake::TestTask.new(:test) do |t|
+   t.libs.push("lib", "test")
+   t.test_files = FileList["test/**/*_test.rb"]
+   t.verbose = true
+   t.warning = true
+ end
+
+ namespace :ci do
+   task :all => ['ci:setup:testunit', 'test']
+ end
+
+ task default: ["ci:all"]
 
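With this Rakefile, the suite can be run locally roughly the way the CI workflows above run it. The `test/reports/` output path is inferred from the `.gitignore` change earlier in this diff, so treat it as an assumption:

```bash
bundle install
bundle exec rake          # default task, runs ci:all (ci:setup:testunit + test)
bundle exec rake test     # plain test run without the CI reporter
ls test/reports/          # JUnit-style TEST-*.xml files picked up by CI
```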
data/examples/Dockerfile ADDED
@@ -0,0 +1,5 @@
+ FROM fluent/fluentd:v1.9.1-1.0
+
+ USER root
+
+ RUN fluent-gem install fluent-plugin-newrelic
data/examples/Windows-IIS.conf ADDED
@@ -0,0 +1,85 @@
+ ####
+ ## New Relic Logs - Basic Windows Event + IIS config v1.5
+ ## Built in FluentD v1.7.4
+ ## by AI of NR 2/4/2020
+ ##
+ ## Don't forget to add your NR license key to the <match> statement at the bottom of this file.
+ ##
+
+ ## Windows Event Log Input
+ <source>
+   @type windows_eventlog
+   @id windows_eventlog
+   tag winevt.raw
+   channels application,system,security
+   <storage>
+     @type local
+     persistent true
+     path c:/opt/td-agent/winevt.pos
+   </storage>
+ </source>
+
+ #
+ # Windows IIS log parsing. This config uses the standard W3C format and the standard location for IIS logs.
+ # It expects the log timestamp to be UTC.
+ # Change the path below if you store your logs elsewhere.
+ #
+ <source>
+   @type tail
+   tag iislog.raw
+   path c:/inetpub/logs/logfiles/*/*
+   pos_file c:/opt/td-agent/iislog.pos
+   <parse>
+     @type regexp
+     expression /(?<time>\d{4}-\d{2}-\d{2} [\d:]+) (?<message>.+)/
+     time_format %Y-%m-%d %H:%M:%S
+     utc true
+   </parse>
+ </source>
+
+ <filter iislog.raw>
+   @type parser
+   key_name message
+   remove_key_name_field false
+   reserve_data true
+   reserve_time true
+   <parse>
+     @type csv
+     delimiter ' '
+     keys hostname,req_method,req_uri,cs-uri-query,s_port,cs-username,c_ip,req_ua,req_referer,http_code,sc-substatus,sc-win32-status,time-taken
+     null_value_pattern -
+     null_empty_string true
+   </parse>
+ </filter>
+
+ #
+ # For a slightly nicer experience, add Service Name (s-sitename) to your log output, comment out the filter above and use this one instead.
+ #
+ #<filter iislog.raw>
+ #  @type parser
+ #  key_name message
+ #  remove_key_name_field false
+ #  reserve_data true
+ #  reserve_time true
+ #  <parse>
+ #    @type csv
+ #    delimiter ' '
+ #    keys service_name,hostname,req_method,req_uri,cs-uri-query,s_port,cs-username,c_ip,req_ua,req_referer,http_code,sc-substatus,sc-win32-status,time-taken
+ #    null_value_pattern -
+ #    null_empty_string true
+ #  </parse>
+ #</filter>
+
+ <filter winevt.raw>
+   @type record_transformer
+   <record>
+     message ${record["description"]}
+     hostname ${record["computer_name"]}
+   </record>
+ </filter>
+
+ # New Relic output
+ <match **>
+   @type newrelic
+   api_key <YOUR INSERT KEY>
+ </match>
data/examples/copy_output.conf ADDED
@@ -0,0 +1,43 @@
+ #Tail and parse Apache access logs
+
+ <source>
+   @type tail
+   @id input_tail_apache
+   tag apache_access
+   path /var/log/apache2/access.log
+   pos_file /var/log/apache2/access.pos
+   path_key filename
+   <parse>
+     @type apache2
+   </parse>
+ </source>
+
+ #Add hostname and tag fields to all events ("records") with a Fluentd tag of apache_access
+
+ <filter apache_access>
+   @type record_transformer
+   <record>
+     hostname "#{Socket.gethostname}"
+     tag ${tag}
+   </record>
+ </filter>
+
+ #Output (https://docs.fluentd.org/output/copy) events to both New Relic and a local file.
+
+ <match **>
+   @type copy
+   <store>
+     @type newrelic
+     api_key <YOUR INSERT KEY>
+   </store>
+   <store>
+     @type file
+     path /var/log/apacheout.log
+     <buffer>
+       #Buffer settings are for testing and not recommended for use in a production environment.
+       timekey 10s
+       timekey_use_utc true
+       timekey_wait 15s
+     </buffer>
+   </store>
+ </match>
data/examples/custom_field.conf ADDED
@@ -0,0 +1,32 @@
+ #Tail and parse Docker log files
+
+ <source>
+   @type tail
+   path /var/lib/docker/containers/*/*-json.log
+   pos_file /var/log/docker-log.pos
+   read_from_head true
+   tag containers
+   <parse>
+     @type json
+     time_format %Y-%m-%dT%H:%M:%S.%NZ
+     keep_time_key true
+     time_key time
+   </parse>
+ </source>
+
+ <filter containers>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     #Add hostname and tag fields to all records
+     fluentd_host "#{Socket.gethostname}"
+     tag ${tag}
+   </record>
+ </filter>
+
+ # Forward events to New Relic
+
+ <match containers>
+   @type newrelic
+   api_key <YOUR INSERT KEY>
+ </match>
data/examples/file_input.conf ADDED
@@ -0,0 +1,29 @@
+ #Tail arbitrary text/log file
+
+ <source>
+   @type tail
+   <parse>
+     @type none
+   </parse>
+   path /var/log/backend-app*.log
+   pos_file /var/log/backend.application.pos
+   path_key filename # Add watched file path to path_key field for every event/record.
+   tag backend.application
+ </source>
+
+ #Add hostname and tag fields to all events ("records") with a Fluentd tag of backend.application
+
+ <filter backend.application>
+   @type record_transformer
+   <record>
+     hostname "#{Socket.gethostname}"
+     tag ${tag}
+   </record>
+ </filter>
+
+ #Write events to New Relic
+
+ <match backend.application>
+   @type newrelic
+   api_key <YOUR INSERT KEY>
+ </match>
data/examples/filter_logs.conf ADDED
@@ -0,0 +1,52 @@
+ #Tail and parse arbitrary text/log file
+
+ <source>
+   @type tail
+   <parse> #Parse timestamp, everything else to be stored in message field
+     @type regexp
+     expression /^\[(?<logtime>[^\]]*)\] (?<message>.*)$/
+     time_key logtime
+     time_format %Y-%m-%d %H:%M:%S %z
+   </parse>
+   path /var/log/backend-app*.log
+   pos_file /var/log/backend.application.pos
+   path_key filename # Add watched file path to path_key field for every event/record.
+   tag backend.application
+ </source>
+
+ #Add hostname and service_name fields to all events ("records") with a Fluentd tag of backend.application
+
+ <filter backend.application>
+   @type record_transformer
+   <record>
+     hostname "#{Socket.gethostname}"
+     service_name ${tag}
+   </record>
+ </filter>
+
+ # For all events with a tag of backend.application:
+ # Keep ONLY events where service_name field contains a value matching /backend.application/ AND where message field contains a value matching /Cannot connect to/
+ # Discard any events where value of hostname field matches /staging/
+
+ <filter backend.application>
+   @type grep
+   <regexp>
+     key service_name
+     pattern /backend.application/
+   </regexp>
+   <regexp>
+     key message
+     pattern /Cannot connect to/
+   </regexp>
+   <exclude>
+     key hostname
+     pattern /staging/
+   </exclude>
+ </filter>
+
+ #Write events with backend.application tag to New Relic
+
+ <match backend.application>
+   @type newrelic
+   api_key <YOUR INSERT KEY>
+ </match>
data/examples/grok_parser.conf ADDED
@@ -0,0 +1,43 @@
+ #Tail arbitrary log file and parse using grok pattern
+ #Install the required plugin: fluent-gem install fluent-plugin-grok-parser
+
+ <source>
+   @type tail
+   <parse>
+     @type grok
+     <grok>
+       pattern %{SYSLOGTIMESTAMP:timestamp} %{LOGLEVEL:loglevel}: %{GREEDYDATA:message}
+     </grok>
+   </parse>
+   path /var/log/customapp.log
+   pos_file /var/log/customapp.pos
+   path_key filename
+   tag custom.application
+ </source>
+
+ # Drop events with custom.application tag where loglevel field contains "debug" or "info" (case-insensitive match)
+
+ <filter custom.application>
+   @type grep
+   <exclude>
+     key loglevel
+     pattern /debug|info/i
+   </exclude>
+ </filter>
+
+ #Add hostname and tag fields to all events ("records") with a Fluentd tag of custom.application
+
+ <filter custom.application>
+   @type record_transformer
+   <record>
+     hostname "#{Socket.gethostname}"
+     tag ${tag}
+   </record>
+ </filter>
+
+ #Write custom.application events to New Relic
+
+ <match custom.application>
+   @type newrelic
+   api_key <YOUR INSERT KEY>
+ </match>
data/examples/minimal_complete_config.conf ADDED
@@ -0,0 +1,27 @@
+ #Tail arbitrary text/log file
+
+ <source>
+   @type tail
+   <parse>
+     @type none
+   </parse>
+   path /home/logs/*
+   path_key file
+   tag sample.tag
+ </source>
+
+ #Add service_name field to all events ("records") with a Fluentd tag of sample.tag
+
+ <filter sample.tag>
+   @type record_transformer
+   <record>
+     service_name ${tag}
+   </record>
+ </filter>
+
+ #Write sample.tag events to New Relic
+
+ <match sample.tag>
+   @type newrelic
+   api_key <YOUR INSERT KEY>
+ </match>
data/examples/multiline_log_parse.conf ADDED
@@ -0,0 +1,11 @@
+ <filter backend.application>
+   @type parser
+   <parse>
+     @type multiline_grok
+     grok_failure_key grokfailure
+     multiline_start_regex ^abc
+     <grok>
+       pattern %{GREEDYDATA:message}
+     </grok>
+   </parse>
+ </filter>
data/examples/parse_nginx.conf ADDED
@@ -0,0 +1,17 @@
+ #Tail and parse NGINX log file
+
+ <source>
+   @type tail
+   <parse>
+     @type nginx
+   </parse>
+   path /path/to/access.log
+   tag nginx.access
+ </source>
+
+ #Write events with tag matching nginx.* to New Relic
+
+ <match nginx.*>
+   @type newrelic
+   api_key <YOUR INSERT KEY>
+ </match>
data/examples/readme.md ADDED
@@ -0,0 +1,71 @@
+ # Fluentd -> New Relic Examples
+
+ ## Creating a Docker Image
+
+ Using [Fluentd](https://www.fluentd.org/) as an image makes it simple to deploy a quick input logging solution for functions such as [Syslogs](https://docs.fluentd.org/input/syslog), [HTTP](https://docs.fluentd.org/input/http), custom [UDP](https://docs.fluentd.org/input/udp) and [TCP](https://docs.fluentd.org/input/tcp) use cases, [SNMP](https://github.com/iij/fluent-plugin-snmp), along with many other functions. The [Fluentd](https://www.fluentd.org/) team has put together a great [set of documents](https://docs.fluentd.org/container-deployment) to help you get their basic configuration set up. After that, you will want to get your logs flowing into [New Relic Logs](https://docs.newrelic.com/docs/logs/new-relic-logs/get-started/introduction-new-relic-logs) to create alerts and monitor your systems.
+
+ If you are able to use the Fluentd image directly, it is really simple to build on that image and add the New Relic Fluentd Output Plugin. The below set of steps assumes you have some basic understanding of building a Docker image.
+
+ ### Docker Image: Steps
+
+ #### 1. Create a `Dockerfile`
+
+ It doesn't take much to get the New Relic Output Plugin into a Docker image. Here is a good example from Docker on how best to create an image: [LINK](https://docs.docker.com/develop/develop-images/dockerfile_best-practices/).
+
+ We have an [example Docker file](Dockerfile) in this folder which can be used to get moving quickly.
+
+ #### 2. Build the Image
+
+ The build process is simple and will register a newly created image in your local Docker repository. If you want to use this image for multiple machines, you will need to publish the image to a location you can access.
+
+ ```bash
+ # Run this command in the same directory as the Docker file or point to its location
+ docker build --tag nr-fluent:latest nri-fluentd .
+
+ # Run this command to verify the image was created
+ docker image ls
+
+ REPOSITORY          TAG      IMAGE ID       CREATED        SIZE
+ nr-fluent           latest   70c388b63afc   1 minute ago   44.9MB
+ ```
+
+ #### 3. Run the Docker Image
+
+ The next steps assume that you have already created a `fluentd.conf` file which is ready to begin monitoring. If you haven't, you must do so before you continue.
+
+ The following example can be used if you are going to run a syslog server on the image.
+
+ ```bash
+ # Notice that the syntax for exposing the UDP port is a bit different
+ # In testing, it appeared that trying to map the UDP port to a different one than the one configured in the Fluentd config file didn't work as expected
+ docker run -d --name "syslog" -p 0.0.0.0:5140:5140/udp -p 0.0.0.0:5142:5142/udp -v /etc/fluentd:/fluentd/etc -e FLUENTD_CONF=fluentd.conf nr-fluent:latest
+ ```
+
+ ## Configuring a Syslog Server with Fluentd and New Relic Logs
+
+ In the below example, we are going to create a `fluentd.conf` which will enable a syslog server. It is a good idea to determine if your syslog server is going
+ to connect to this service using UDP or TCP. By default, most syslog servers will likely use UDP. In the below example, I am setting up two syslog listeners for
+ Ubiquiti [Edgemax router](https://help.ubnt.com/hc/en-us/articles/204975904-EdgeRouter-Remote-Syslog-Server-for-System-Logs) and the [Unifi Security Gateway and Access points](https://community.ui.com/questions/syslog-server-and-unifi-logs/bbde4318-e73f-4efe-b1b9-ae11319cc1d9).
+
+ ### Fluentd.Conf: Steps
+
+ #### 1. Create a `fluentd.conf` file in a known directory on the host machine
+
+ Below, I have chosen `/etc/fluentd/` as my directory.
+
+ ```bash
+ # Make the directory
+ sudo mkdir /etc/fluentd
+
+ # Edit the file
+ sudo nano /etc/fluentd/fluentd.conf
+ ```
+
+ #### 2. Add the contents from the [`syslog/fluentd.conf`](syslog/fluentd.config)
+
+ You can find the contents of the [`syslog/fluentd.conf`](syslog/fluentd.config) in the sub folder `syslog`. These contents should provide a quick start. In the provided
+ example, the syslog details are coming from the above mentioned devices. You may need to tweak the configuration according to the server sending the syslog traffic.
+
+ #### 3. Check New Relic for New Logs
+
+ That is all it should take to get a new stream of logs coming from those devices.
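Once the container from step 3 of the Docker section is running, one quick way to confirm the pipeline end to end is to send a test message to the mapped syslog port. This assumes the util-linux `logger` command and the port mapping shown earlier:

```bash
# Send a single UDP syslog datagram to the Fluentd syslog source on port 5140
logger --server 127.0.0.1 --port 5140 --udp --tag fluentd-test "Hello from the syslog example"
```

The message should then show up in the New Relic Logs UI shortly afterwards.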
data/examples/syslog/fluentd.config ADDED
@@ -0,0 +1,47 @@
+ # UNIFI
+ <source>
+   @type syslog
+   port 5140
+   bind 0.0.0.0
+   tag unifi
+ </source>
+
+ <filter unifi.**>
+   @type record_transformer
+   renew_record true
+   enable_ruby true
+   <record>
+     timestamp ${time.to_f}
+     hostname ${record["host"][/(^.*?),/, 1]}
+     service ${tag_prefix[1]}
+     log_level ${tag_suffix[2]}
+     message ${record.to_json}
+   </record>
+ </filter>
+
+ # EDGE MAX
+ <source>
+   @type syslog
+   port 5142
+   bind 0.0.0.0
+   tag edge-max
+ </source>
+
+ <filter edge-max.**>
+   @type record_transformer
+   renew_record true
+   enable_ruby true
+   <record>
+     timestamp ${time.to_f}
+     hostname ${record["host"]}
+     service ${tag_prefix[1]}
+     log_level ${tag_suffix[2]}
+     message ${record.to_json}
+   </record>
+ </filter>
+
+ # Send data
+ <match **>
+   @type newrelic
+   api_key <CHANGE TO YOUR LICENSE KEY HERE>
+ </match>
data/examples/syslog_input.conf ADDED
@@ -0,0 +1,5 @@
+ <source>
+   @type syslog
+   port 5140
+   tag syslog.messages
+ </source>
data/lib/fluent/plugin/out_newrelic.rb CHANGED
@@ -33,6 +33,7 @@ module Fluent
      config_param :license_key, :string, :default => nil
 
      DEFAULT_BUFFER_TYPE = 'memory'.freeze
+     MAX_PAYLOAD_SIZE = 1000000 # bytes
 
      config_section :buffer do
        config_set_default :@type, DEFAULT_BUFFER_TYPE
@@ -49,17 +50,19 @@ module Fluent
 
      def configure(conf)
        super
+
+       @api_key ||= ENV["NEW_RELIC_API_KEY"]
+       @license_key ||= ENV["NEW_RELIC_LICENSE_KEY"]
        if @api_key.nil? && @license_key.nil?
-         raise Fluent::ConfigError.new("'api_key' or `license_key` parameter is required")
+         raise Fluent::ConfigError.new("'api_key' or 'license_key' parameter is required")
        end
 
        # create initial sockets hash and socket based on config param
        @end_point = URI.parse(@base_uri)
        auth = {
          @api_key.nil? ? 'X-License-Key' : 'X-Insert-Key' =>
-           @api_key.nil? ? @license_key : @api_key
+             @api_key.nil? ? @license_key : @api_key
        }
-       puts auth
        @header = {
          'X-Event-Source' => 'logs',
          'Content-Encoding' => 'gzip'
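Together with the README changes above, this fallback means the key no longer has to appear in the Fluentd config file at all. A minimal sketch of that usage (the launch command and paths are assumptions, not part of this change):

```bash
# Export the key in the environment that launches Fluentd / td-agent,
# then leave api_key/license_key out of the <match> block entirely.
export NEW_RELIC_LICENSE_KEY="YOUR_LICENSE_KEY"
fluentd -c /etc/fluent/fluent.conf
```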
@@ -68,6 +71,9 @@ module Fluent
      end
 
      def package_record(record, timestamp)
+       if defined? timestamp.nsec
+         timestamp = timestamp * 1000 + timestamp.nsec / 1_000_000
+       end
        packaged = {
          'timestamp' => timestamp,
          # non-intrinsic attributes get put into 'attributes'
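The new guard converts an `EventTime`-style value (whole seconds plus a nanosecond remainder) into a single millisecond timestamp before the record is packaged. A standalone illustration of the arithmetic, using plain integers and hypothetical values:

```ruby
seconds = 1_620_000_000   # whole seconds since the epoch
nsec    = 123_456_789     # nanosecond remainder, as reported by #nsec

millis = seconds * 1000 + nsec / 1_000_000  # integer division truncates to ms
puts millis               # => 1620000000123
```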
@@ -87,39 +93,23 @@ module Fluent
          packaged['message'] = record['log']
          packaged['attributes'].delete('log')
        end
-
+
        packaged
      end
 
      def write(chunk)
-       payload = {
-         'common' => {
-           'attributes' => {
-             'plugin' => {
-               'type' => 'fluentd',
-               'version' => NewrelicFluentdOutput::VERSION,
-             }
-           }
-         },
-         'logs' => []
-       }
+       logs = []
        chunk.msgpack_each do |ts, record|
          next unless record.is_a? Hash
          next if record.empty?
-         payload['logs'].push(package_record(record, ts))
+         logs.push(package_record(record, ts))
        end
-       io = StringIO.new
-       gzip = Zlib::GzipWriter.new(io)
 
-       # Fluentd can run with a version of Ruby (2.1.0) whose to_json method doesn't support non-ASCII characters.
-       # So we use Yajl, which can handle all Unicode characters. Apparently this library is what Fluentd uses
-       # internally, so it is installed by default with td-agent.
-       # See https://github.com/fluent/fluentd/issues/215
-       gzip << Yajl.dump([payload])
-       gzip.close
-       send_payload(io.string)
+
+       payloads = get_compressed_payloads(logs)
+       payloads.each { |payload| send_payload(payload) }
      end
-
+
      def handle_response(response)
        if !(200 <= response.code.to_i && response.code.to_i < 300)
          log.error("Response was " + response.code + " " + response.body)
@@ -130,12 +120,63 @@ module Fluent
        http = Net::HTTP.new(@end_point.host, 443)
        http.use_ssl = true
        http.verify_mode = OpenSSL::SSL::VERIFY_PEER
-       puts @header
        request = Net::HTTP::Post.new(@end_point.request_uri, @header)
        request.body = payload
        handle_response(http.request(request))
      end
 
+     private
+
+     def get_compressed_payloads(logs)
+       return [] if logs.length == 0
+
+       payload = create_payload(logs)
+       compressed_payload = compress(payload)
+
+       if compressed_payload.bytesize <= MAX_PAYLOAD_SIZE
+         return [compressed_payload]
+       end
+
+       compressed_payload = nil # Free for GC
+
+       if logs.length > 1 # we can split
+         # let's split logs array by half, and try to create payloads again
+         midpoint = logs.length / 2
+         first_half = get_compressed_payloads(logs.slice(0, midpoint))
+         second_half = get_compressed_payloads(logs.slice(midpoint, logs.length))
+         return first_half + second_half
+       else
+         log.error("Can't compress record below required maximum packet size and it will be discarded. Record: #{logs[0]}")
+         return []
+       end
+     end
+
+     def create_payload(logs)
+       {
+         'common' => {
+           'attributes' => {
+             'plugin' => {
+               'type' => 'fluentd',
+               'version' => NewrelicFluentdOutput::VERSION,
+             }
+           }
+         },
+         'logs' => logs
+       }
+     end
+
+     def compress(payload)
+       io = StringIO.new
+       gzip = Zlib::GzipWriter.new(io)
+
+       # Fluentd can run with a version of Ruby (2.1.0) whose to_json method doesn't support non-ASCII characters.
+       # So we use Yajl, which can handle all Unicode characters. Apparently this library is what Fluentd uses
+       # internally, so it is installed by default with td-agent.
+       # See https://github.com/fluent/fluentd/issues/215
+       gzip << Yajl.dump([payload])
+       gzip.close
+       io.string
+     end
    end
  end
end
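The recursive halving above is easiest to see in isolation. Below is a minimal standalone sketch of the same strategy; the method and constant names mirror the plugin but the code is an illustration, not the plugin's own code path:

```ruby
require "json"
require "stringio"
require "zlib"

MAX_PAYLOAD_SIZE = 1_000_000 # bytes, the same limit the plugin uses

# Gzip a JSON-serialized object and return the compressed bytes.
def gzip_json(obj)
  io = StringIO.new
  gz = Zlib::GzipWriter.new(io)
  gz << JSON.dump(obj)
  gz.close
  io.string
end

# Return a list of gzip payloads, each at or under MAX_PAYLOAD_SIZE,
# halving the batch recursively until every piece fits. A single record
# that still does not fit is dropped, as in the plugin.
def split_into_payloads(logs)
  return [] if logs.empty?

  compressed = gzip_json(logs)
  return [compressed] if compressed.bytesize <= MAX_PAYLOAD_SIZE
  return [] if logs.length == 1

  mid = logs.length / 2
  split_into_payloads(logs[0...mid]) + split_into_payloads(logs[mid..-1])
end
```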
data/lib/newrelic-fluentd-output/version.rb CHANGED
@@ -1,3 +1,3 @@
  module NewrelicFluentdOutput
-   VERSION = "1.1.4"
+   VERSION = "1.2.0"
  end
data/merge-to-master-pipeline.yml ADDED
@@ -0,0 +1,57 @@
+ # Run any time a commit is merged to the 'master' branch
+ trigger:
+   - master
+
+ # There is a separate pipeline for PRs (it does not do deploys)
+ pr: none
+
+ pool:
+   vmImage: 'ubuntu-latest'
+
+ steps:
+   - task: DownloadSecureFile@1
+     name: "rubyGemsCredentials"
+     inputs:
+       secureFile: 'credentials'
+
+   - task: UseRubyVersion@0
+     inputs:
+       versionSpec: '>= 2.5'
+
+   - script: |
+       gem install bundler
+       bundle update
+       bundle install --retry=3 --jobs=4
+     displayName: 'bundle install'
+
+   - task: Bash@3
+     displayName: 'Run tests'
+     inputs:
+       targetType: 'inline'
+       script: |
+         bundle exec rake
+
+   - task: PublishTestResults@2
+     inputs:
+       testResultsFormat: 'JUnit'
+       testResultsFiles: '**/TEST-*.xml'
+       mergeTestResults: true
+       failTaskOnFailedTests: true
+       testRunTitle: 'Publish tests'
+
+   - task: Bash@3
+     displayName: 'Build gem'
+     inputs:
+       targetType: 'inline'
+       script: |
+         gem build newrelic-fluentd-output.gemspec
+
+   - task: Bash@3
+     displayName: 'Publish gem to RubyGems'
+     inputs:
+       targetType: 'inline'
+       script: |
+         eval "$(rbenv init -)"
+         mv $(rubyGemsCredentials.secureFilePath) ~/.gem/credentials
+         sudo chmod 600 ~/.gem/credentials
+         gem push fluent-plugin-newrelic-*.gem
data/newrelic-fluentd-output.gemspec CHANGED
@@ -23,4 +23,5 @@ Gem::Specification.new do |spec|
    spec.add_development_dependency "rake"
    spec.add_development_dependency "test-unit"
    spec.add_development_dependency "webmock"
+   spec.add_development_dependency "rspec_junit_formatter"
  end
data/pr-pipeline.yml ADDED
@@ -0,0 +1,33 @@
+ pr:
+   branches:
+     include:
+       - '*'
+
+ pool:
+   vmImage: 'ubuntu-latest'
+
+ steps:
+   - task: UseRubyVersion@0
+     inputs:
+       versionSpec: '>= 2.5'
+
+   - script: |
+       gem install bundler
+       bundle update
+       bundle install --retry=3 --jobs=4
+     displayName: 'bundle install'
+
+   - task: Bash@3
+     displayName: 'Run tests'
+     inputs:
+       targetType: 'inline'
+       script: |
+         bundle exec rake
+
+   - task: PublishTestResults@2
+     inputs:
+       testResultsFormat: 'JUnit'
+       testResultsFiles: '**/TEST-*.xml'
+       mergeTestResults: true
+       failTaskOnFailedTests: true
+       testRunTitle: 'Publish tests'
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-newrelic
  version: !ruby/object:Gem::Version
-   version: 1.1.4
+   version: 1.2.0
  platform: ruby
  authors:
  - New Relic Logging Team
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2019-09-30 00:00:00.000000000 Z
+ date: 2021-05-04 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: fluentd
@@ -80,6 +80,20 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: '0'
+ - !ruby/object:Gem::Dependency
+   name: rspec_junit_formatter
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  description:
  email:
  - logging-team@newrelic.com
@@ -88,18 +102,34 @@ extensions: []
  extra_rdoc_files: []
  files:
  - ".dockerignore"
+ - ".github/workflows/merge-to-master.yml"
+ - ".github/workflows/pr.yml"
+ - ".github/workflows/repolinter.yml"
  - ".gitignore"
- - ".rspec"
  - DEVELOPER.md
  - Dockerfile
  - Gemfile
  - LICENSE
  - README.md
  - Rakefile
- - grandcentral.yml
+ - examples/Dockerfile
+ - examples/Windows-IIS.conf
+ - examples/copy_output.conf
+ - examples/custom_field.conf
+ - examples/file_input.conf
+ - examples/filter_logs.conf
+ - examples/grok_parser.conf
+ - examples/minimal_complete_config.conf
+ - examples/multiline_log_parse.conf
+ - examples/parse_nginx.conf
+ - examples/readme.md
+ - examples/syslog/fluentd.config
+ - examples/syslog_input.conf
  - lib/fluent/plugin/out_newrelic.rb
  - lib/newrelic-fluentd-output/version.rb
+ - merge-to-master-pipeline.yml
  - newrelic-fluentd-output.gemspec
+ - pr-pipeline.yml
  homepage: https://github.com/newrelic/newrelic-fluentd-output
  licenses:
  - Apache-2.0
@@ -119,7 +149,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
    - !ruby/object:Gem::Version
      version: '0'
  requirements: []
- rubygems_version: 3.0.1
+ rubygems_version: 3.1.4
  signing_key:
  specification_version: 4
  summary: Sends FluentD events to New Relic
data/.rspec DELETED
@@ -1,2 +0,0 @@
- --format documentation
- --color
data/grandcentral.yml DELETED
@@ -1,29 +0,0 @@
- slack_channel: logging-notifications
- deploy_mechanism: none
- test_suites:
-   - command: |
-       mkdir -p ~/.gem
-       curl -u $ARTIFACTORY_USERNAME:$ARTIFACTORY_PASSWORD https://artifacts.datanerd.us/api/gems/newrelic-gems-local/api/v1/api_key.yaml -o ~/.gem/credentials
-       chmod 0600 ~/.gem/credentials
-       bundle install && bundle exec rspec
-     environment:
-       dockerfile: Dockerfile
-     name: Unit Tests
- build:
-   environment:
-     dockerfile: Dockerfile
-   secret_env_vars:
-     - name: ARTIFACTORY_USERNAME
-       shared_path: artifactory
-     - name: ARTIFACTORY_PASSWORD
-       shared_path: artifactory
- publish:
-   command: |
-     mkdir -p ~/.gem
-     curl -u $ARTIFACTORY_USERNAME:$ARTIFACTORY_PASSWORD https://artifacts.datanerd.us/api/gems/newrelic-gems-local/api/v1/api_key.yaml -o ~/.gem/credentials
-     chmod 0600 ~/.gem/credentials
-     bundle install
-     gem release --host https://artifacts.datanerd.us/api/gems/newrelic-gems-local
- read_version:
-   command: cat ./lib/newrelic-fluentd-output/version.rb | grep VERSION | awk '{
-     gsub("\"", "", $3); print $3 }'