log-export-container 1.0.53

Files changed (38)
  1. checksums.yaml +7 -0
  2. data/bin/log-export-container +11 -0
  3. data/conf-utils.rb +106 -0
  4. data/create-conf.rb +22 -0
  5. data/fluentd/etc/classify-default-csv.conf +39 -0
  6. data/fluentd/etc/classify-default-json.conf +38 -0
  7. data/fluentd/etc/classify-syslog-csv.conf +94 -0
  8. data/fluentd/etc/classify-tcp-csv.conf +89 -0
  9. data/fluentd/etc/input-extract-audit-entities.conf +9 -0
  10. data/fluentd/etc/input-file-csv.conf +10 -0
  11. data/fluentd/etc/input-file-json.conf +9 -0
  12. data/fluentd/etc/input-json-chunk.conf +3 -0
  13. data/fluentd/etc/input-syslog-csv.conf +13 -0
  14. data/fluentd/etc/input-syslog-json.conf +12 -0
  15. data/fluentd/etc/input-tcp-csv.conf +12 -0
  16. data/fluentd/etc/input-tcp-json.conf +11 -0
  17. data/fluentd/etc/monitoring.conf +25 -0
  18. data/fluentd/etc/output-azure-loganalytics.conf +9 -0
  19. data/fluentd/etc/output-bigquery.conf +13 -0
  20. data/fluentd/etc/output-cloudwatch.conf +11 -0
  21. data/fluentd/etc/output-datadog.conf +10 -0
  22. data/fluentd/etc/output-elasticsearch-8.conf +5 -0
  23. data/fluentd/etc/output-kafka.conf +11 -0
  24. data/fluentd/etc/output-logz.conf +8 -0
  25. data/fluentd/etc/output-loki.conf +5 -0
  26. data/fluentd/etc/output-mongo.conf +9 -0
  27. data/fluentd/etc/output-remote-syslog.conf +11 -0
  28. data/fluentd/etc/output-s3.conf +15 -0
  29. data/fluentd/etc/output-splunk-hec.conf +12 -0
  30. data/fluentd/etc/output-stdout.conf +3 -0
  31. data/fluentd/etc/output-sumologic.conf +10 -0
  32. data/fluentd/etc/output-template.conf +4 -0
  33. data/fluentd/etc/process.conf +19 -0
  34. data/fluentd/plugins/filter_sdm_decode_chunk_events.rb +71 -0
  35. data/fluentd/plugins/parser_sdm_json.rb +29 -0
  36. data/fluentd/scripts/dump_sdm_entities.rb +117 -0
  37. data/start.rb +34 -0
  38. metadata +365 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 8a8a98cd6e60c7e4fc0ebdadb63e8d2aa1bcb347726070f3de9939efaf17ef61
+   data.tar.gz: 1748c27a2a52c77731f8036b4933703301a13eb5c908b1a36efde46254f76d97
+ SHA512:
+   metadata.gz: f326147c735245fa03540e2350e7b567628cced9af4a5f89a24b60f0b00a0a49ad26f13bc07577ee4c98458152dc1ac52a366233a52b3b08d7244e6e243331e2
+   data.tar.gz: f638b901721d0665c22279d56091e215c97d80d614d984722f7810a5538fc7e2cf929fa53e4beb9341299fa00cf1fe3800f5899f63a16f9b10cae5ce69532fd5
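Note: these digests cover the two members of the .gem archive. A .gem file is a tar archive containing metadata.gz, data.tar.gz, and checksums.yaml.gz, so the entries can be re-verified locally. A minimal Ruby sketch, assuming those members have already been extracted (and the checksums file gunzipped) into the current directory:

    # Compare the published SHA256 digests against locally computed ones.
    # Assumes metadata.gz, data.tar.gz, and checksums.yaml sit in the current directory.
    require 'digest'
    require 'yaml'

    checksums = YAML.safe_load(File.read('checksums.yaml'))
    %w[metadata.gz data.tar.gz].each do |member|
      expected = checksums['SHA256'][member]
      actual   = Digest::SHA256.file(member).hexdigest
      puts "#{member}: #{expected == actual ? 'OK' : 'MISMATCH'}"
    end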
data/bin/log-export-container ADDED
@@ -0,0 +1,11 @@
+ #!/usr/bin/env ruby
+
+ bin_dir = File.expand_path(File.dirname(__FILE__))
+
+ ENV['FLUENTD_DIR'] = bin_dir + '/../fluentd'
+ ENV['LOG_EXPORT_CONTAINER_INPUT'] = ENV['LOG_EXPORT_CONTAINER_INPUT'] || 'syslog-json'
+ ENV['LOG_EXPORT_CONTAINER_OUTPUT'] = ENV['LOG_EXPORT_CONTAINER_OUTPUT'] || 'stdout'
+
+ start_script_path = File.join(bin_dir, '../start.rb')
+
+ require start_script_path
data/conf-utils.rb ADDED
@@ -0,0 +1,106 @@
+
+ require_relative './fluentd/scripts/dump_sdm_entities'
+
+ SUPPORTED_STORES = "stdout remote-syslog s3 cloudwatch splunk-hec datadog azure-loganalytics sumologic kafka mongo logz loki elasticsearch-8 bigquery"
+
+ def extract_value(str)
+   unless str
+     str = ""
+   end
+   str.gsub(/ /, "").downcase
+ end
+
+ def extract_entity_interval(entity)
+   if entity == 'activities'
+     extract_interval = extract_activities_interval
+     return extract_interval ? "#{extract_interval}m" : ""
+   end
+   entity_interval_match = ENV['LOG_EXPORT_CONTAINER_EXTRACT_AUDIT'].to_s.match /#{entity}\/(\d+)/
+   interval = entity_interval_match ? entity_interval_match[1] : 480
+   "#{interval}m"
+ end
+
+ def monitoring_conf
+   monitoring_enabled = extract_value(ENV['LOG_EXPORT_CONTAINER_ENABLE_MONITORING']) == "true"
+   if monitoring_enabled
+     File.read("#{ETC_DIR}/monitoring.conf")
+   end
+ end
+
+ def output_stores_conf
+   conf = ""
+   output_types = extract_value(ENV['LOG_EXPORT_CONTAINER_OUTPUT'])
+   stores = SUPPORTED_STORES.split(' ')
+   stores.each do |store|
+     if output_types.include?(store)
+       conf = "#{conf}#{store} "
+     end
+   end
+   if conf == ""
+     return "stdout"
+   end
+   conf
+ end
+
+ def input_conf
+   conf = extract_value(ENV['LOG_EXPORT_CONTAINER_INPUT'])
+   if conf != ""
+     filename = "#{ETC_DIR}/input-#{conf}.conf"
+   else
+     filename = "#{ETC_DIR}/input-syslog-json.conf"
+   end
+   File.read(filename)
+ end
+
+ def decode_chunk_events_conf
+   conf = extract_value(ENV['LOG_EXPORT_CONTAINER_INPUT'])
+   decode_chunks_enabled = extract_value(ENV['LOG_EXPORT_CONTAINER_DECODE_CHUNK_EVENTS']) == "true"
+   if (conf == "syslog-json" || conf == "tcp-json") && decode_chunks_enabled
+     File.read("#{ETC_DIR}/input-json-chunk.conf")
+   end
+ end
+
+ def input_extract_audit_entities_conf(entity)
+   extract_activities = extract_value(ENV['LOG_EXPORT_CONTAINER_EXTRACT_AUDIT_ACTIVITIES'])
+   extract_entities = extract_value(ENV['LOG_EXPORT_CONTAINER_EXTRACT_AUDIT'])
+   if entity == "activities" && extract_activities != "true" && !extract_entities.match(/activities/)
+     return
+   elsif entity != "activities" && !extract_entities.match(/#{entity}/)
+     return
+   end
+   read_file = File.read("#{ETC_DIR}/input-extract-audit-entities.conf")
+   read_file['$tag'] = AUDIT_ENTITY_TYPES[entity]
+   read_file['$interval'] = extract_entity_interval(entity)
+   read_file.gsub!("$entity", entity)
+   read_file
+ end
+
+ def default_classify_conf
+   conf = extract_value(ENV['LOG_EXPORT_CONTAINER_INPUT'])
+   if conf == "syslog-csv" || conf == "tcp-csv" || conf == "file-csv"
+     filename = "#{ETC_DIR}/classify-default-csv.conf"
+   else
+     filename = "#{ETC_DIR}/classify-default-json.conf"
+   end
+   File.read(filename)
+ end
+
+ def custom_classify_conf
+   conf = extract_value(ENV['LOG_EXPORT_CONTAINER_INPUT'])
+   if conf == "syslog-csv"
+     File.read("#{ETC_DIR}/classify-syslog-csv.conf")
+   elsif conf == "tcp-csv" || conf == "file-csv"
+     File.read("#{ETC_DIR}/classify-tcp-csv.conf")
+   end
+ end
+
+ def output_conf
+   output_content = []
+   stores = output_stores_conf.split(' ')
+   stores.each do |store|
+     output_content << File.read("#{ETC_DIR}/output-#{store}.conf")
+   end
+   template = File.read("#{ETC_DIR}/output-template.conf")
+   template["$stores"] = output_content.join("")
+   template
+ end
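Note: the store-selection rule in output_stores_conf is easy to check in isolation. The following self-contained Ruby sketch restates the same rule outside the gem; the env value shown is made up for the example:

    # Every supported store named in LOG_EXPORT_CONTAINER_OUTPUT is kept, anything
    # unknown is ignored, and an empty result falls back to "stdout".
    supported = "stdout remote-syslog s3 cloudwatch splunk-hec datadog azure-loganalytics sumologic kafka mongo logz loki elasticsearch-8 bigquery"
    requested = "S3 Datadog influxdb".gsub(/ /, "").downcase      # example env value
    selected  = supported.split(' ').select { |s| requested.include?(s) }
    puts selected.empty? ? "stdout" : selected.join(' ')          # => "s3 datadog"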
data/create-conf.rb ADDED
@@ -0,0 +1,22 @@
+
+ ETC_DIR="#{ENV['FLUENTD_DIR']}/etc"
+
+ require_relative './conf-utils'
+
+ def create_file
+   File.open("#{ETC_DIR}/fluent.conf", "w") do |f|
+     f.write(input_conf)
+     f.write(monitoring_conf)
+     f.write(input_extract_audit_entities_conf("activities"))
+     f.write(input_extract_audit_entities_conf("resources"))
+     f.write(input_extract_audit_entities_conf("users"))
+     f.write(input_extract_audit_entities_conf("roles"))
+     f.write(default_classify_conf)
+     f.write(custom_classify_conf)
+     f.write(File.read("#{ETC_DIR}/process.conf"))
+     f.write(decode_chunk_events_conf)
+     f.write(output_conf)
+   end
+ end
+
+ create_file
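Note: start.rb is listed in this gem but its contents are not shown in this section, so the following is only an assumed end-to-end flow: set the directories the generator expects, build fluent.conf by loading create-conf.rb, then hand the generated file to fluentd. The fluentd invocation is a hypothesis, not taken from start.rb.

    # Hypothetical flow (start.rb itself is not shown here).
    ENV['FLUENTD_DIR'] ||= File.expand_path('fluentd', __dir__)
    ENV['LOG_EXPORT_CONTAINER_INPUT']  ||= 'syslog-json'
    ENV['LOG_EXPORT_CONTAINER_OUTPUT'] ||= 'stdout'

    require_relative 'create-conf'   # writes "#{ENV['FLUENTD_DIR']}/etc/fluent.conf"
    exec('fluentd', '-c', "#{ENV['FLUENTD_DIR']}/etc/fluent.conf",
         '-p', "#{ENV['FLUENTD_DIR']}/plugins")   # assumption: how start.rb might launch fluentd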
data/fluentd/etc/classify-default-csv.conf ADDED
@@ -0,0 +1,39 @@
+ # line format: 2021-06-17T19:06:28Z ip-172-31-3-25 strongDM[44495]: 2021-06-17 19:06:28.017624404 +0000 UTC,start,01sfRGaM7BDXf3nQgxxpIpKU7YS7,rs-26a4c33360a277fe,docker-postgres,a-0326fcc060460b7d,Rodolfo Campos,select 'a';,817fa457ceb5e2c1869e011611651e8f8f945584
+ <match log.**>
+   @type rewrite_tag_filter
+   <rule>
+     key 2
+     pattern /start/
+     tag class.start
+   </rule>
+   <rule>
+     key 2
+     pattern /complete/
+     tag class.complete
+   </rule>
+   <rule>
+     key 2
+     pattern /chunk/
+     tag class.chunk
+   </rule>
+   <rule>
+     key 2
+     pattern /postStart/
+     tag class.postStart
+   </rule>
+   <rule>
+     key 2
+     pattern /event/
+     tag class.event
+   </rule>
+   <rule>
+     key 8
+     pattern /activity/
+     tag class.activity
+   </rule>
+   <rule>
+     key 2
+     pattern /.*/
+     tag unclass
+   </rule>
+ </match>
data/fluentd/etc/classify-default-json.conf ADDED
@@ -0,0 +1,38 @@
+ <match log.**>
+   @type rewrite_tag_filter
+   <rule>
+     key type
+     pattern /start/
+     tag class.start
+   </rule>
+   <rule>
+     key type
+     pattern /complete/
+     tag class.complete
+   </rule>
+   <rule>
+     key type
+     pattern /completed/
+     tag class.completed
+   </rule>
+   <rule>
+     key type
+     pattern /chunk/
+     tag class.chunk
+   </rule>
+   <rule>
+     key type
+     pattern /postStart/
+     tag class.postStart
+   </rule>
+   <rule>
+     key type
+     pattern /activity/
+     tag class.activity
+   </rule>
+   <rule>
+     key type
+     pattern /.*/
+     tag unclass
+   </rule>
+ </match>
data/fluentd/etc/classify-syslog-csv.conf ADDED
@@ -0,0 +1,94 @@
+ # json: {"type":"start","timestamp":"2021-06-23T15:09:03.781664039Z","uuid":"01uM2484l9xAgB88xziqJnIUNjRi","datasourceId":"rs-26a4c33360a277fe","datasourceName":"docker-postgres","userId":"a-0326fcc060460b7d","userName":"Rodolfo Me Campos","query":"select 'b'","hash":"f8bbb2eb1bc17242501ebcba710eaa12325d74fd"}
+ <filter class.start>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].scan(/([0-9]+-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+Z).*/).last.first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     datasourceId ${record["4"]}
+     datasourceName ${record["5"]}
+     userId ${record["6"]}
+     userName ${record["7"]}
+     query ${record["8"]}
+     hash ${record["9"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # json: {"type":"complete","timestamp":"2021-06-23T14:59:54.59849408Z","uuid":"s1uM0wXlLGCPs9LMWhQ9dDaUUj3l","duration":5529,"records":0}
+ <filter class.complete>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].scan(/([0-9]+-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+Z).*/).last.first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     duration ${record["4"]}
+     records ${record["5"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # json: {"type":"chunk","timestamp":"2021-06-23T15:08:01.495154973Z","uuid":"s1uM1vhfg8KpZlVshQqw1d0nl0QH","chunkId":1,"events":[{"duration":313,"data":"V2VsY29tZSB0byBPcGVuU1NIIFNlcnZlcg0KDQobWz8yMDA0aG9w"},{"duration":0,"data":"ZW5zc2gtc2VydmVyOn4kIA=="},{"duration":1199,"data":"cA=="},{"duration":974,"data":"dw=="},{"duration":338,"data":"ZA=="},{"duration":397,"data":"DQobWz8yMDA0bA0vY29uZmlnDQobWz8yMDA0aG9wZW5zc2gtc2VydmVyOn4kIA=="},{"duration":1466,"data":"ZQ=="},{"duration":146,"data":"eGk="},{"duration":141,"data":"dA=="},{"duration":420,"data":"DQobWz8yMDA0bA1sb2dvdXQNCg=="}],"hash":"f45cb5bf8606ebf22514b8c1e010c13eecf2a1cc"}
+ <filter class.chunk>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].scan(/([0-9]+-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+Z).*/).last.first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     chunkId ${record["4"]}
+     events ${record["5"]}
+     hash ${record["6"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # json: {"type":"postStart","timestamp":"2021-06-23T14:59:54.598376207Z","uuid":"s1uM0wXlLGCPs9LMWhQ9dDaUUj3l","query":"{\"version\":1,\"width\":114,\"height\":24,\"duration\":5.173268588,\"command\":\"\",\"title\":null,\"env\":{\"TERM\":\"xterm-256color\"},\"type\":\"shell\",\"fileName\":null,\"fileSize\":0,\"stdout\":null,\"lastChunkId\":0,\"clientCommand\":null,\"pod\":null,\"container\":null,\"requestMethod\":\"\",\"requestURI\":\"\",\"requestBody\":null}\n","hash":"ee0dd41a2613a2d85029129439d266367ca6740d"}
+ <filter class.postStart>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].scan(/([0-9]+-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+Z).*/).last.first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     query ${record["4"]}
+     hash ${record["5"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # line: {"1":"2021-06-23T15:26:40Z ip-172-31-3-25 strongDM[861498]: 2021-06-23 15:26:40.110527179 +0000 UTC","2":"event","3":"s1uM4CVcUdhvRB0bnm8yBqcme47c","4":"1","5":"329","6":"V2VsY29tZSB0byBPcGVuU1NIIFNlcnZlcg0KDQobWz8yMDA0aA==","7":null,"8":null,"9":null}
+ <filter class.event>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].scan(/([0-9]+-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+Z).*/).last.first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     undef ${record["4"]} # TODO validate field name
+     duration ${record["5"]}
+     data ${record["6"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # line: {"1":"2022-06-16 13:34:56.502034 +0000 UTC","2":"xxx.xxx.xxx.xxx","3":"a-xxx","4":"Admin token","5":"user logged into the local client","6":"Admin token logged into the local client.","7":"activity","8":null,"9":null,"sourceAddress":"127.0.0.1","sourceHostname":"localhost"}
+ <filter class.activity>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].split('.').first}
+     ip_address ${record["2"]}
+     actorUserID ${record["3"]}
+     actorName ${record["4"]}
+     activity ${record["5"]}
+     description ${record["6"]}
+     objects ${record["7"]}
+     type ${record["8"]}
+     sourceAddress ${record["sourceAddress"]}
+     sourceHostname ${record["sourceHostname"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
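Note: the scan(...) expression used in the filters above pulls the ISO-8601 timestamp out of the syslog-prefixed first CSV field. A quick self-contained Ruby check against the first field from the sample line in the comments:

    # Standalone check of the timestamp extraction used by the class.* filters.
    field = "2021-06-17T19:06:28Z ip-172-31-3-25 strongDM[44495]: 2021-06-17 19:06:28.017624404 +0000 UTC"
    puts field.scan(/([0-9]+-[0-9]+-[0-9]+T[0-9]+:[0-9]+:[0-9]+Z).*/).last.first
    # => 2021-06-17T19:06:28Z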
data/fluentd/etc/classify-tcp-csv.conf ADDED
@@ -0,0 +1,89 @@
+ # json: {"type":"start","timestamp":"2021-06-23 14:59:54.59849408 0000 +UTC","uuid":"01uM2484l9xAgB88xziqJnIUNjRi","datasourceId":"rs-26a4c33360a277fe","datasourceName":"docker-postgres","userId":"a-0326fcc060460b7d","userName":"Rodolfo Me Campos","query":"select 'b'","hash":"f8bbb2eb1bc17242501ebcba710eaa12325d74fd"}
+ <filter class.start>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].split('.').first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     datasourceId ${record["4"]}
+     datasourceName ${record["5"]}
+     userId ${record["6"]}
+     userName ${record["7"]}
+     query ${record["8"]}
+     hash ${record["9"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # json: {"type":"complete","timestamp":"2021-06-23 14:59:54.59849408 0000 +UTC","uuid":"s1uM0wXlLGCPs9LMWhQ9dDaUUj3l","duration":5529,"records":0}
+ <filter class.complete>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].split('.').first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     duration ${record["4"]}
+     records ${record["5"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # json: {"type":"complete","timestamp":"2021-06-23 14:59:54.59849408 0000 +UTC","uuid":"s1uM0wXlLGCPs9LMWhQ9dDaUUj3l","duration":5529,"records":0}
+ <filter class.completed>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].split('.').first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     duration ${record["4"]}
+     records ${record["5"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # json: {"type":"chunk","timestamp":"2021-06-23 14:59:54.59849408 0000 +UTC","uuid":"s1uM1vhfg8KpZlVshQqw1d0nl0QH","chunkId":1,"events":[{"duration":313,"data":"V2VsY29tZSB0byBPcGVuU1NIIFNlcnZlcg0KDQobWz8yMDA0aG9w"},{"duration":0,"data":"ZW5zc2gtc2VydmVyOn4kIA=="},{"duration":1199,"data":"cA=="},{"duration":974,"data":"dw=="},{"duration":338,"data":"ZA=="},{"duration":397,"data":"DQobWz8yMDA0bA0vY29uZmlnDQobWz8yMDA0aG9wZW5zc2gtc2VydmVyOn4kIA=="},{"duration":1466,"data":"ZQ=="},{"duration":146,"data":"eGk="},{"duration":141,"data":"dA=="},{"duration":420,"data":"DQobWz8yMDA0bA1sb2dvdXQNCg=="}],"hash":"f45cb5bf8606ebf22514b8c1e010c13eecf2a1cc"}
+ <filter class.chunk>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].split('.').first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     chunkId ${record["4"]}
+     events ${record["5"]}
+     hash ${record["6"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # json: {"type":"postStart","timestamp":"2021-06-23 14:59:54.59849408 0000 +UTC","uuid":"s1uM0wXlLGCPs9LMWhQ9dDaUUj3l","query":"{\"version\":1,\"width\":114,\"height\":24,\"duration\":5.173268588,\"command\":\"\",\"title\":null,\"env\":{\"TERM\":\"xterm-256color\"},\"type\":\"shell\",\"fileName\":null,\"fileSize\":0,\"stdout\":null,\"lastChunkId\":0,\"clientCommand\":null,\"pod\":null,\"container\":null,\"requestMethod\":\"\",\"requestURI\":\"\",\"requestBody\":null}\n","hash":"ee0dd41a2613a2d85029129439d266367ca6740d"}
+ <filter class.postStart>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].split('.').first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     query ${record["4"]}
+     hash ${record["5"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
+
+ # line: {"1":"2021-06-23T15:26:40Z ip-172-31-3-25 strongDM[861498]: 2021-06-23 15:26:40.110527179 +0000 UTC","2":"event","3":"s1uM4CVcUdhvRB0bnm8yBqcme47c","4":"1","5":"329","6":"V2VsY29tZSB0byBPcGVuU1NIIFNlcnZlcg0KDQobWz8yMDA0aA==","7":null,"8":null,"9":null}
+ <filter class.event>
+   @type record_transformer
+   enable_ruby true
+   <record>
+     timestamp ${record["1"].split('.').first}
+     type ${record["2"]}
+     uuid ${record["3"]}
+     undef ${record["4"]} # TODO validate field name
+     duration ${record["5"]}
+     data ${record["6"]}
+   </record>
+   remove_keys 1,2,3,4,5,6,7,8,9
+ </filter>
data/fluentd/etc/input-extract-audit-entities.conf ADDED
@@ -0,0 +1,9 @@
+ <source>
+   @type exec
+   <parse>
+     @type json
+   </parse>
+   tag $tag
+   run_interval $interval
+   command "ruby #{ENV['FLUENTD_DIR']}/scripts/dump_sdm_entities.rb $entity"
+ </source>
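Note: conf-utils.rb fills in the $tag, $interval, and $entity placeholders before this block is written into fluent.conf. A standalone Ruby sketch of that substitution; the tag value here is hypothetical, since the real one comes from AUDIT_ENTITY_TYPES in dump_sdm_entities.rb, which is not shown in this section:

    # Standalone sketch of the substitution done by input_extract_audit_entities_conf.
    template = File.read('data/fluentd/etc/input-extract-audit-entities.conf')  # path relative to the unpacked gem
    template['$tag']      = 'audit.resources'   # hypothetical tag value
    template['$interval'] = '480m'              # default from extract_entity_interval
    template.gsub!('$entity', 'resources')
    puts template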
data/fluentd/etc/input-file-csv.conf ADDED
@@ -0,0 +1,10 @@
+ <source>
+   @type tail
+   path "#{ENV['LOG_FILE_PATH']}"
+   pos_file "#{ENV['LOG_FILE_PATH']}.pos"
+   tag log
+   <parse>
+     @type csv
+     keys 1,2,3,4,5,6,7,8,9
+   </parse>
+ </source>
data/fluentd/etc/input-file-json.conf ADDED
@@ -0,0 +1,9 @@
+ <source>
+   @type tail
+   path "#{ENV['LOG_FILE_PATH']}"
+   pos_file "#{ENV['LOG_FILE_PATH']}.pos"
+   tag log
+   <parse>
+     @type sdm_json
+   </parse>
+ </source>
data/fluentd/etc/input-json-chunk.conf ADDED
@@ -0,0 +1,3 @@
+ <filter class.chunk>
+   @type sdm_decode_chunk_events
+ </filter>
data/fluentd/etc/input-syslog-csv.conf ADDED
@@ -0,0 +1,13 @@
+ <source>
+   @type syslog
+   protocol_type tcp
+   port 5140
+   bind 0.0.0.0
+   tag log
+   source_hostname_key sourceHostname
+   source_address_key sourceAddress
+   <parse>
+     @type csv
+     keys 1,2,3,4,5,6,7,8,9
+   </parse>
+ </source>
data/fluentd/etc/input-syslog-json.conf ADDED
@@ -0,0 +1,12 @@
+ <source>
+   @type syslog
+   protocol_type tcp
+   port 5140
+   bind 0.0.0.0
+   tag log
+   source_hostname_key sourceHostname
+   source_address_key sourceAddress
+   <parse>
+     @type sdm_json
+   </parse>
+ </source>
data/fluentd/etc/input-tcp-csv.conf ADDED
@@ -0,0 +1,12 @@
+ <source>
+   @type tcp
+   tag log # required
+   port 5140
+   bind 0.0.0.0
+   source_hostname_key sourceHostname
+   source_address_key sourceAddress
+   <parse>
+     @type csv
+     keys 1,2,3,4,5,6,7,8,9
+   </parse>
+ </source>
data/fluentd/etc/input-tcp-json.conf ADDED
@@ -0,0 +1,11 @@
+ <source>
+   @type tcp
+   tag log # required
+   port 5140
+   bind 0.0.0.0
+   source_hostname_key sourceHostname
+   source_address_key sourceAddress
+   <parse>
+     @type sdm_json
+   </parse>
+ </source>
data/fluentd/etc/monitoring.conf ADDED
@@ -0,0 +1,25 @@
+ <source>
+   @type prometheus
+   bind 0.0.0.0
+   port 24321
+   metrics_path /metrics
+ </source>
+ <source>
+   @type prometheus_output_monitor
+   interval 10
+   <labels>
+     hostname ${hostname}
+   </labels>
+ </source>
+ <filter class.**>
+   @type prometheus
+   <metric>
+     name fluentd_input_status_num_records_total
+     type counter
+     desc The total number of incoming records
+     <labels>
+       tag ${tag}
+       hostname ${hostname}
+     </labels>
+   </metric>
+ </filter>
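Note: with monitoring enabled, the prometheus source above serves plain-text metrics on port 24321. A minimal Ruby sketch of polling that endpoint; the localhost address is an assumption for a locally running container:

    # Read the Prometheus text exposition served by the monitoring config above.
    require 'net/http'

    metrics = Net::HTTP.get(URI('http://localhost:24321/metrics'))
    puts metrics.lines.grep(/fluentd_input_status_num_records_total/)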
data/fluentd/etc/output-azure-loganalytics.conf ADDED
@@ -0,0 +1,9 @@
+ <store>
+   @type azure-loganalytics
+   customer_id "#{ENV['AZURE_LOGANALYTICS_CUSTOMER_ID']}"
+   shared_key "#{ENV['AZURE_LOGANALYTICS_SHARED_KEY']}"
+   log_type sdm
+
+   # for more config options
+   # see https://github.com/yokawasa/fluent-plugin-azure-loganalytics
+ </store>
data/fluentd/etc/output-bigquery.conf ADDED
@@ -0,0 +1,13 @@
+ <store>
+   @type bigquery_insert
+   auth_method json_key
+   json_key "{\"private_key\": \"#{ENV['BIGQUERY_PRIVATE_KEY']}\", \"client_email\": \"#{ENV['BIGQUERY_CLIENT_EMAIL']}\"}"
+   ignore_unknown_values true
+
+   project "#{ENV['BIGQUERY_PROJECT_ID']}"
+   dataset "#{ENV['BIGQUERY_DATASET_ID']}"
+   table "#{ENV['BIGQUERY_TABLE_ID']}"
+
+   # for more config options
+   # see https://github.com/fluent-plugins-nursery/fluent-plugin-bigquery
+ </store>
data/fluentd/etc/output-cloudwatch.conf ADDED
@@ -0,0 +1,11 @@
+ <store>
+   @type cloudwatch_logs
+   aws_key_id "#{ENV['AWS_ACCESS_KEY_ID']}"
+   aws_sec_key "#{ENV['AWS_SECRET_ACCESS_KEY']}"
+   region "#{ENV['AWS_REGION']}"
+   log_group_name "#{ENV['CLOUDWATCH_LOG_GROUP_NAME']}"
+   log_stream_name "#{ENV['CLOUDWATCH_LOG_STREAM_NAME']}"
+   auto_create_stream true
+   # for more config options
+   # see https://github.com/fluent-plugins-nursery/fluent-plugin-cloudwatch-logs
+ </store>
data/fluentd/etc/output-datadog.conf ADDED
@@ -0,0 +1,10 @@
+ <store>
+   @type datadog
+   @id "#{ENV['HOSTNAME']}"
+   api_key "#{ENV['DATADOG_API_KEY']}"
+
+   dd_source 'sdm'
+
+   # for more config options
+   # see https://github.com/DataDog/fluent-plugin-datadog
+ </store>
data/fluentd/etc/output-elasticsearch-8.conf ADDED
@@ -0,0 +1,5 @@
+ <store>
+   @type elasticsearch
+   hosts "#{ENV['ELASTICSEARCH_HOSTS']}"
+   index_name "#{ENV['ELASTICSEARCH_INDEX_NAME']}"
+ </store>
data/fluentd/etc/output-kafka.conf ADDED
@@ -0,0 +1,11 @@
+ <store>
+   @type kafka2
+   brokers "#{ENV['KAFKA_BROKERS']}"
+   topic "#{ENV['KAFKA_TOPIC']}"
+
+   <format>
+     @type "#{ENV['KAFKA_FORMAT_TYPE'] || 'json'}"
+   </format>
+   # for more config options
+   # see https://github.com/fluent/fluent-plugin-kafka
+ </store>
data/fluentd/etc/output-logz.conf ADDED
@@ -0,0 +1,8 @@
+ <store>
+   @type logzio_buffered
+
+   endpoint_url "#{ENV['LOGZ_ENDPOINT']}"
+
+   output_include_time true
+   output_include_tags true
+ </store>
data/fluentd/etc/output-loki.conf ADDED
@@ -0,0 +1,5 @@
+ <store>
+   @type loki
+   url "#{ENV['LOKI_URL']}"
+   extra_labels {"sdm": "log-export-container"}
+ </store>
data/fluentd/etc/output-mongo.conf ADDED
@@ -0,0 +1,9 @@
+ <store>
+   @type mongo
+   connection_string "#{ENV['MONGO_URI']}"
+   collection "#{ENV['MONGO_COLLECTION'] || 'sdm_logs'}"
+   remove_tag_prefix class.
+   <buffer>
+     flush_interval 10s
+   </buffer>
+ </store>
data/fluentd/etc/output-remote-syslog.conf ADDED
@@ -0,0 +1,11 @@
+ <store>
+   @type remote_syslog
+   host "#{ENV['REMOTE_SYSLOG_HOST']}"
+   port "#{ENV['REMOTE_SYSLOG_PORT']}"
+   protocol "#{ENV['REMOTE_SYSLOG_PROTOCOL'] || 'tcp'}"
+   packet_size 16384
+
+   <format>
+     @type json
+   </format>
+ </store>
data/fluentd/etc/output-s3.conf ADDED
@@ -0,0 +1,15 @@
+ <store>
+   @type s3
+   aws_key_id "#{ENV['AWS_ACCESS_KEY_ID']}"
+   aws_sec_key "#{ENV['AWS_SECRET_ACCESS_KEY']}"
+   s3_bucket "#{ENV['S3_BUCKET']}"
+   s3_region "#{ENV['S3_REGION']}"
+   path "#{ENV['S3_PATH']}/#{ENV['HOSTNAME']}"
+   <buffer>
+     @type memory
+     # for more config options
+     # see https://docs.fluentd.org/configuration/buffer-section
+     timekey 1h
+     timekey_wait 5m
+   </buffer>
+ </store>