logstash-output-thinkingdata 1.1.1 → 1.2.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 36f24c7196c243ee0ec3df9f446f98e60c48d860e6404d6e38897ba2841a99fb
-  data.tar.gz: 9b9fbb9076a958cda52e69775bf23fdfd947c6925ede50cdd86f6d8138f58a2c
+  metadata.gz: a59cb189b3b65b95a23f1c54039948220adc9701d61425441c21e6ae13f0a207
+  data.tar.gz: bffcdc6a7f831a884cef5dfb94d9da423f356c86994fa9e17da2350d7a552002
 SHA512:
-  metadata.gz: 2cc68f4e54b11095e2226772967499e4097175a0d4647f32b02eda64b6f5ecfb29f15d7d6e500b6cf92f903e914bdb7880611f713882e7b4a34e7347432a3f19
-  data.tar.gz: d60a81599ec0cf34c697676a85ed6d249e5de8f8d3da74adf6afcdaa42bf01b7e6038d08d8de990478872c023db548df1bafacc243d7c9133e0b9a408cbcf056
+  metadata.gz: '048904134164411ec9b61e60719a264b43c2d4dc7faa4220eb47a88d52077090b6c4ca5b0259cba32c3707d1bc67564bedd3aeb82ed84ddd9dbdea5a1e042956'
+  data.tar.gz: 7f7ff7da269b11ac1ce317c3562ac8371bb92a92d6338c9a13a55902a9112e05f6f3d5b90d5a561bcc894021f327762ee88340c6b102f799c149864403a3e87a
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+**v1.2.0** (2023-04-25)
+- Support passing multiple records in message
+
 **v1.1.1** (2021-07-10)
 - Add JSON format validation
 
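Note on the v1.2.0 entry above: the event's message field may now hold either a single JSON object or a JSON array of objects, and each array element becomes its own record. A minimal sketch of the two accepted shapes (apart from #uuid, the field names are illustrative ThinkingData-style fields, not taken from this diff):

  # One record per event (pre-1.2.0 behaviour, still supported):
  single_record = '{"#event_name":"pay","#account_id":"u1"}'
  # Several records in one event (new in 1.2.0); each array element is sent separately:
  multi_record  = '[{"#event_name":"pay","#account_id":"u1"},{"#event_name":"login","#account_id":"u2"}]'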
data/lib/logstash/outputs/thinkingdata.rb CHANGED
@@ -6,7 +6,6 @@ require "stud/buffer"
 require "zlib"
 require "json"
 
-
 # An thinkingdata output that does nothing.
 class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
 
@@ -42,7 +41,7 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
   # Whether to check the appid
   config :appid_check, :validate => :boolean, :default => false
 
-  PLUGIN_VERSION = "1.1.1"
+  PLUGIN_VERSION = "1.2.0"
 
   public
 
@@ -67,9 +66,9 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
     @last_report_count = 0
     @total_send_count = 0
     buffer_config = {
-      :max_items => @flush_batch_size.to_i,
-      :max_interval => @flush_interval_sec.to_i,
-      :logger => @logger
+      :max_items => @flush_batch_size.to_i,
+      :max_interval => @flush_interval_sec.to_i,
+      :logger => @logger
     }
     buffer_initialize(buffer_config)
     @filebeat_status = {} if @is_filebeat_status_record
@@ -99,6 +98,20 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
     end
   end
 
+  def send_content(content, event)
+    content['#uuid'] = SecureRandom.uuid if @uuid
+    if is_filebeat_input?(event) # filebeat input record
+      host = event.get("[host][name]")
+      file = event.get("[log][file][path]")
+      file = event.get("[source]") if file.nil?
+      offset = event.get("[log][offset]")
+      offset = event.get("[offset]") if offset.nil?
+      log_detail = "host: #{host}, file: #{file}"
+      record_filebeat_status(log_detail, offset) if @is_filebeat_status_record
+    end
+    buffer_receive(content)
+  end
+
   public
 
   def multi_receive(events)
@@ -106,18 +119,20 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
     @receive_count += events.length
     events.each do |event|
       begin
-        content = JSON.parse(event.get("message"))
-        content['#uuid'] = SecureRandom.uuid if @uuid
-        if is_filebeat_input?(event) # filebeat input record
-          host = event.get("[host][name]")
-          file = event.get("[log][file][path]")
-          file = event.get("[source]") if file.nil?
-          offset = event.get("[log][offset]")
-          offset = event.get("[offset]") if offset.nil?
-          log_detail = "host: #{host}, file: #{file}"
-          record_filebeat_status(log_detail, offset) if @is_filebeat_status_record
+        message = event.get("message")
+        # Check whether the message payload is a JSON array
+        if message[0, 1] == "["
+          contents = JSON.parse(message)
+          contents.each do |content|
+            begin
+              send_content(content, event)
+            end
+          end
+        else
+          content = JSON.parse(message)
+          send_content(content, event)
         end
-        buffer_receive(content)
+
       rescue => e
         @logger.error("Could not process content", :content => event.to_s, :Exception => e)
         @parse_error_count += 1
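To read the new control flow outside the diff markers, here is a condensed, standalone sketch of what multi_receive now does per event, with send_content stood in by a block (an illustration, not the plugin code itself):

  require "json"

  # A message whose first character is "[" is parsed as a JSON array and every
  # element is forwarded individually; anything else is parsed as one object.
  def dispatch_message(message)
    if message[0, 1] == "["
      JSON.parse(message).each { |content| yield content }
    else
      yield JSON.parse(message)
    end
  end

  dispatch_message('[{"#uuid":"a"},{"#uuid":"b"}]') { |c| puts c }  # two records
  dispatch_message('{"#uuid":"c"}') { |c| puts c }                  # one record

As in the plugin, a parse failure in either branch raises and is caught by the surrounding rescue, which logs the event and increments @parse_error_count.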
@@ -154,13 +169,13 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
       compress_type = 'gzip'
     end
     if @appid.nil? || @appid.empty?
-      headers = {'custom_appid' => 'true', 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
-                 'compress' => compress_type, 'TA-Integration-Type' => 'logstash',
-                 'TA-Integration-Version' => PLUGIN_VERSION, 'TA-Integration-Count' => events.length.to_s}
+      headers = { 'custom_appid' => 'true', 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
+                  'compress' => compress_type, 'TA-Integration-Type' => 'logstash',
+                  'TA-Integration-Version' => PLUGIN_VERSION, 'TA-Integration-Count' => events.length.to_s }
     else
-      headers = {'appid' => @appid, 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
-                 'compress' => compress_type, 'TA-Integration-Type' => 'logstash',
-                 'TA-Integration-Version' => PLUGIN_VERSION, 'TA-Integration-Count' => events.length.to_s}
+      headers = { 'appid' => @appid, 'version' => PLUGIN_VERSION, 'user-agent' => 'logstash_' + PLUGIN_VERSION,
+                  'compress' => compress_type, 'TA-Integration-Type' => 'logstash',
+                  'TA-Integration-Version' => PLUGIN_VERSION, 'TA-Integration-Count' => events.length.to_s }
     end
 
     until do_send(data, headers)
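For reference, when no appid is configured and gzip compression is active, the headers built above come out like this (the count is invented for illustration; everything else follows directly from the code):

  headers = {
    'custom_appid'           => 'true',
    'version'                => '1.2.0',           # PLUGIN_VERSION
    'user-agent'             => 'logstash_1.2.0',
    'compress'               => 'gzip',
    'TA-Integration-Type'    => 'logstash',
    'TA-Integration-Version' => '1.2.0',
    'TA-Integration-Count'   => '500'              # events.length.to_s
  }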
@@ -202,7 +217,7 @@ class LogStash::Outputs::Thinkingdata < LogStash::Outputs::Base
   def record_filebeat_status(log_detail, offset)
     status = @filebeat_status[log_detail]
     if status.nil?
-      status = {:receive_time => Time.now, :offset => offset}
+      status = { :receive_time => Time.now, :offset => offset }
       @filebeat_status[log_detail] = status
     else
       status[:offset] = offset
data/logstash-output-thinkingdata.gemspec CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-thinkingdata'
-  s.version = '1.1.1'
+  s.version = '1.2.0'
   s.licenses = ['Apache-2.0']
   s.summary = 'Output plugin for Thinkingdata Analytics'
   s.description = 'This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program.'
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-thinkingdata
 version: !ruby/object:Gem::Version
-  version: 1.1.1
+  version: 1.2.0
 platform: ruby
 authors:
 - sdk
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-07-10 00:00:00.000000000 Z
+date: 2023-04-25 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core-plugin-api
@@ -144,7 +144,7 @@ licenses:
 metadata:
   logstash_plugin: 'true'
   logstash_group: output
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -159,8 +159,8 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.0.8
-signing_key:
+rubygems_version: 3.0.3.1
+signing_key:
 specification_version: 4
 summary: Output plugin for Thinkingdata Analytics
 test_files: