logstash-output-thinkingdata 1.1.0 → 1.2.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/lib/logstash/outputs/thinkingdata.rb +57 -25
- data/logstash-output-thinkingdata.gemspec +1 -1
- metadata +6 -6
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a59cb189b3b65b95a23f1c54039948220adc9701d61425441c21e6ae13f0a207
+  data.tar.gz: bffcdc6a7f831a884cef5dfb94d9da423f356c86994fa9e17da2350d7a552002
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '048904134164411ec9b61e60719a264b43c2d4dc7faa4220eb47a88d52077090b6c4ca5b0259cba32c3707d1bc67564bedd3aeb82ed84ddd9dbdea5a1e042956'
+  data.tar.gz: 7f7ff7da269b11ac1ce317c3562ac8371bb92a92d6338c9a13a55902a9112e05f6f3d5b90d5a561bcc894021f327762ee88340c6b102f799c149864403a3e87a
data/CHANGELOG.md
CHANGED
data/lib/logstash/outputs/thinkingdata.rb
CHANGED
@@ -6,7 +6,6 @@ require "stud/buffer"
 require "zlib"
 require "json"
 
-
 # An thinkingdata output that does nothing.
 class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
 
@@ -42,7 +41,7 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
   # 是否检测appid
   config :appid_check, :validate => :boolean, :default => false
 
-  PLUGIN_VERSION = "1.1.0"
+  PLUGIN_VERSION = "1.2.0"
 
   public
 
@@ -67,9 +66,9 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
     @last_report_count = 0
     @total_send_count = 0
     buffer_config = {
-
-
-
+      :max_items => @flush_batch_size.to_i,
+      :max_interval => @flush_interval_sec.to_i,
+      :logger => @logger
     }
     buffer_initialize(buffer_config)
     @filebeat_status = {} if @is_filebeat_status_record
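
The plugin buffers events through Stud::Buffer (required at the top of the file): buffer_initialize stores the options above, buffer_receive queues each parsed record, and the mixin calls the plugin's flush(events, final) once :max_items records are pending or :max_interval seconds have elapsed. A minimal standalone sketch of that contract, assuming only the stud gem is installed; the class name and values here are illustrative, not part of the plugin:

    require "logger"
    require "stud/buffer"

    class BufferDemo
      include Stud::Buffer

      def initialize
        # Same option keys the plugin passes in buffer_config.
        buffer_initialize(:max_items => 2, :max_interval => 5, :logger => Logger.new($stdout))
      end

      def receive(record)
        buffer_receive(record)   # queues the record; flushes once the buffer fills
      end

      # Called by Stud::Buffer with the pending records.
      def flush(records, final)
        puts "flushing #{records.length} record(s): #{records.inspect}"
      end
    end

    demo = BufferDemo.new
    demo.receive("a" => 1)
    demo.receive("b" => 2)             # second record reaches :max_items, so flush runs
    demo.buffer_flush(:final => true)  # drain anything left, as output plugins do on close
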
@@ -99,6 +98,20 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
     end
   end
 
+  def send_content(content, event)
+    content['#uuid'] = SecureRandom.uuid if @uuid
+    if is_filebeat_input?(event) # filebeat input 记录
+      host = event.get("[host][name]")
+      file = event.get("[log][file][path]")
+      file = event.get("[source]") if file.nil?
+      offset = event.get("[log][offset]")
+      offset = event.get("[offset]") if offset.nil?
+      log_detail = "host: #{host}, file: #{file}"
+      record_filebeat_status(log_detail, offset) if @is_filebeat_status_record
+    end
+    buffer_receive(content)
+  end
+
   public
 
   def multi_receive(events)
@@ -106,18 +119,20 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
     @receive_count += events.length
     events.each do |event|
       begin
-
-
-        if
-
-
-
-
-
-
-
+        message = event.get("message")
+        # 判断 message 中的数据是否为json array
+        if message[0, 1] == "["
+          contents = JSON.parse(message)
+          contents.each do |content|
+            begin
+              send_content(content, event)
+            end
+          end
+        else
+          content = JSON.parse(message)
+          send_content(content, event)
         end
-
+
       rescue => e
         @logger.error("Could not process content", :content => event.to_s, :Exception => e)
         @parse_error_count += 1
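
The new multi_receive branch treats a message whose first character is "[" as a JSON array and hands each element to send_content individually; anything else is parsed as a single object. A stripped-down sketch of just that branching, with made-up payloads and no Logstash event object involved:

    require "json"

    # Mirrors the check added in multi_receive: message[0, 1] == "["
    def parse_batch(message)
      if message[0, 1] == "["
        JSON.parse(message)       # one record per array element
      else
        [JSON.parse(message)]     # single object, wrapped for a uniform result
      end
    end

    p parse_batch('[{"event":"a"},{"event":"b"}]')  # => two records
    p parse_batch('{"event":"c"}')                  # => one record
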
@@ -140,26 +155,27 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
   public
 
   def flush(events, final)
+    events = data_valid(events)
+    data = events.to_json
     if @compress == 0
-      data = events.to_json
       compress_type = 'none'
     else
       gz = StringIO.new("w")
       gz.set_encoding("BINARY")
       z = Zlib::GzipWriter.new(gz)
-      z.write(
+      z.write(data)
       z.close
       data = gz.string
       compress_type = 'gzip'
     end
     if @appid.nil? || @appid.empty?
-      headers = {'custom_appid' => 'true', 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
-
-
+      headers = { 'custom_appid' => 'true', 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
+                  'compress' => compress_type, 'TA-Integration-Type' => 'logstash',
+                  'TA-Integration-Version' => PLUGIN_VERSION, 'TA-Integration-Count' => events.length.to_s }
     else
-      headers = {'appid' => @appid, 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
-
-
+      headers = { 'appid' => @appid, 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
+                  'compress' => compress_type, 'TA-Integration-Type' => 'logstash',
+                  'TA-Integration-Version' => PLUGIN_VERSION, 'TA-Integration-Count' => events.length.to_s }
     end
 
     until do_send(data, headers)
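
flush now serializes the validated batch once up front and, when compression is enabled, gzips that JSON through a binary StringIO before attaching the compress and TA-Integration-* headers. A self-contained sketch of the same gzip path, using a throwaway event list:

    require "json"
    require "stringio"
    require "zlib"

    events = [{ "event" => "demo" }, { "event" => "demo2" }]
    data = events.to_json

    # Same construction flush uses when @compress != 0.
    gz = StringIO.new("w")
    gz.set_encoding("BINARY")
    z = Zlib::GzipWriter.new(gz)
    z.write(data)
    z.close
    body = gz.string

    headers = { 'compress' => 'gzip', 'TA-Integration-Type' => 'logstash',
                'TA-Integration-Count' => events.length.to_s }

    puts "raw #{data.bytesize} bytes, gzipped #{body.bytesize} bytes"
    puts headers.inspect
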
@@ -201,7 +217,7 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
   def record_filebeat_status(log_detail, offset)
     status = @filebeat_status[log_detail]
     if status.nil?
-      status = {:receive_time => Time.now, :offset => offset}
+      status = { :receive_time => Time.now, :offset => offset }
       @filebeat_status[log_detail] = status
     else
       status[:offset] = offset
@@ -233,6 +249,22 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
     result
   end
 
+  private
+
+  def data_valid(events)
+    result = []
+    events.each do |event|
+      begin
+        event.to_json
+        result << event
+      rescue => e
+        @logger.error("Could not parse events", :content => event.to_s, :Exception => e)
+        @parse_error_count += 1
+      end
+    end
+    return result
+  end
+
 end
 
 # class LogStash::Outputs::Thinkingdata
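
data_valid is the other half of the flush change: before serializing the batch it drops any event whose to_json raises, so a single bad record no longer aborts the whole request. A standalone illustration of that filter, using plain hashes and a NaN value that the JSON generator rejects (example data only):

    require "json"

    def data_valid(events)
      result = []
      events.each do |event|
        begin
          event.to_json            # probe serialization; raises on bad data
          result << event
        rescue => e
          warn "dropping unserializable event: #{e.class}"
        end
      end
      result
    end

    good = { "event" => "ok" }
    bad  = { "value" => Float::NAN }   # NaN is not representable in JSON, so to_json raises
    p data_valid([good, bad])          # => [{"event"=>"ok"}]
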
data/logstash-output-thinkingdata.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-thinkingdata'
-  s.version = '1.1.0'
+  s.version = '1.2.0'
   s.licenses = ['Apache-2.0']
   s.summary = 'Output plugin for Thinkingdata Analytics'
   s.description = 'This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program.'
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-thinkingdata
 version: !ruby/object:Gem::Version
-  version: 1.1.0
+  version: 1.2.0
 platform: ruby
 authors:
 - sdk
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2023-04-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core-plugin-api
@@ -144,7 +144,7 @@ licenses:
 metadata:
   logstash_plugin: 'true'
   logstash_group: output
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -159,8 +159,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.0.
-signing_key:
+rubygems_version: 3.0.3.1
+signing_key:
 specification_version: 4
 summary: Output plugin for Thinkingdata Analytics
 test_files: