fluent-plugin-elasticsearch 1.9.4 → 5.0.3
This diff shows the content changes between publicly released versions of the package, as published to the supported registries, and is provided for informational purposes only.
- checksums.yaml +5 -5
- data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
- data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
- data/.github/workflows/issue-auto-closer.yml +12 -0
- data/.github/workflows/linux.yml +26 -0
- data/.github/workflows/macos.yml +26 -0
- data/.github/workflows/windows.yml +26 -0
- data/.travis.yml +33 -6
- data/CONTRIBUTING.md +24 -0
- data/Gemfile +4 -1
- data/History.md +445 -1
- data/ISSUE_TEMPLATE.md +19 -0
- data/README.ElasticsearchGenID.md +116 -0
- data/README.ElasticsearchInput.md +293 -0
- data/README.Troubleshooting.md +692 -0
- data/README.md +1013 -38
- data/appveyor.yml +20 -0
- data/fluent-plugin-elasticsearch.gemspec +15 -9
- data/{Gemfile.v0.12 → gemfiles/Gemfile.elasticsearch.v6} +6 -5
- data/lib/fluent/log-ext.rb +38 -0
- data/lib/fluent/plugin/default-ilm-policy.json +14 -0
- data/lib/fluent/plugin/elasticsearch_constants.rb +13 -0
- data/lib/fluent/plugin/elasticsearch_error.rb +5 -0
- data/lib/fluent/plugin/elasticsearch_error_handler.rb +129 -0
- data/lib/fluent/plugin/elasticsearch_fallback_selector.rb +9 -0
- data/lib/fluent/plugin/elasticsearch_index_lifecycle_management.rb +67 -0
- data/lib/fluent/plugin/elasticsearch_index_template.rb +186 -12
- data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +10 -0
- data/lib/fluent/plugin/elasticsearch_tls.rb +70 -0
- data/lib/fluent/plugin/filter_elasticsearch_genid.rb +77 -0
- data/lib/fluent/plugin/in_elasticsearch.rb +325 -0
- data/lib/fluent/plugin/oj_serializer.rb +22 -0
- data/lib/fluent/plugin/out_elasticsearch.rb +1008 -267
- data/lib/fluent/plugin/out_elasticsearch_data_stream.rb +218 -0
- data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +232 -214
- data/test/plugin/test_alias_template.json +9 -0
- data/test/plugin/test_elasticsearch_error_handler.rb +646 -0
- data/test/plugin/test_elasticsearch_fallback_selector.rb +74 -0
- data/test/plugin/test_elasticsearch_index_lifecycle_management.rb +66 -0
- data/test/plugin/test_elasticsearch_tls.rb +145 -0
- data/test/plugin/test_filter_elasticsearch_genid.rb +215 -0
- data/test/plugin/test_in_elasticsearch.rb +459 -0
- data/test/plugin/test_index_alias_template.json +11 -0
- data/test/plugin/test_index_template.json +25 -0
- data/test/plugin/test_oj_serializer.rb +19 -0
- data/test/plugin/test_out_elasticsearch.rb +5029 -387
- data/test/plugin/test_out_elasticsearch_data_stream.rb +337 -0
- data/test/plugin/test_out_elasticsearch_dynamic.rb +681 -208
- data/test/test_log-ext.rb +35 -0
- metadata +97 -19
data/lib/fluent/plugin/out_elasticsearch_dynamic.rb
@@ -1,264 +1,282 @@
 # encoding: UTF-8
 require_relative 'out_elasticsearch'
 
-class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
+module Fluent::Plugin
+  class ElasticsearchOutputDynamic < ElasticsearchOutput
 
-  Fluent::Plugin.register_output('elasticsearch_dynamic', self)
+    Fluent::Plugin.register_output('elasticsearch_dynamic', self)
 
-  config_param :delimiter, :string, :default => "."
+    helpers :event_emitter
 
-  DYNAMIC_PARAM_NAMES = %W[hosts host port logstash_format logstash_prefix logstash_dateformat time_key utc_index index_name tag_key type_name id_key parent_key routing_key write_operation]
-  DYNAMIC_PARAM_SYMBOLS = DYNAMIC_PARAM_NAMES.map { |n| "@#{n}".to_sym }
+    config_param :delimiter, :string, :default => "."
 
-  attr_reader :dynamic_config
+    DYNAMIC_PARAM_NAMES = %W[hosts host port include_timestamp logstash_format logstash_prefix logstash_dateformat time_key utc_index index_name tag_key type_name id_key parent_key routing_key write_operation]
+    DYNAMIC_PARAM_SYMBOLS = DYNAMIC_PARAM_NAMES.map { |n| "@#{n}".to_sym }
 
-  def configure(conf)
-    super
+    RequestInfo = Struct.new(:host, :index)
 
-    # evaluate all configurations here
-    @dynamic_config = {}
-    DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
-      value = expand_param(self.instance_variable_get(var), nil, nil, nil)
-      key = DYNAMIC_PARAM_NAMES[i]
-      @dynamic_config[key] = value.to_s
-    }
-    # end eval all configs
-    @current_config = nil
-  end
+    attr_reader :dynamic_config
 
-  def create_meta_config_map
-    {'id_key' => '_id', 'parent_key' => '_parent', 'routing_key' => '_routing'}
-  end
+    def configure(conf)
+      super
 
- … (13 lines not captured)
-                          reload_connections: @reload_connections,
-                          reload_on_failure: @reload_on_failure,
-                          resurrect_after: @resurrect_after,
-                          retry_on_failure: 5,
-                          transport_options: {
-                            headers: { 'Content-Type' => 'application/json' },
-                            request: { timeout: @request_timeout },
-                            ssl: { verify: @ssl_verify, ca_file: @ca_file }
-                          }
-                        }), &adapter_conf)
-    es = Elasticsearch::Client.new transport: transport
+      # evaluate all configurations here
+      @dynamic_config = {}
+      DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
+        value = expand_param(self.instance_variable_get(var), nil, nil, nil)
+        key = DYNAMIC_PARAM_NAMES[i]
+        @dynamic_config[key] = value.to_s
+      }
+      # end eval all configs
+    end
+
+    def create_meta_config_map
+      {'id_key' => '_id', 'parent_key' => '_parent', 'routing_key' => @routing_key_name}
+    end
 
-    begin
-      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description(host)})!" unless es.ping
-    rescue *es.transport.host_unreachable_exceptions => e
-      raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description(host)})! #{e.message}"
-    end
 
-    log.info "Connection opened to Elasticsearch cluster => #{connection_options_description(host)}"
-    es
+    def client(host = nil, compress_connection = false)
+      # check here to see if we already have a client connection for the given host
+      connection_options = get_connection_options(host)
+
+      @_es = nil unless is_existing_connection(connection_options[:hosts])
+      @_es = nil unless @compressable_connection == compress_connection
+
+      @_es ||= begin
+        @compressable_connection = compress_connection
+        @current_config = connection_options[:hosts].clone
+        adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
+        gzip_headers = if compress_connection
+                         {'Content-Encoding' => 'gzip'}
+                       else
+                         {}
+                       end
+        headers = { 'Content-Type' => @content_type.to_s, }.merge(gzip_headers)
+        ssl_options = { verify: @ssl_verify, ca_file: @ca_file}.merge(@ssl_version_options)
+        transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
+          options: {
+            reload_connections: @reload_connections,
+            reload_on_failure: @reload_on_failure,
+            resurrect_after: @resurrect_after,
+            logger: @transport_logger,
+            transport_options: {
+              headers: headers,
+              request: { timeout: @request_timeout },
+              ssl: ssl_options,
+            },
+            http: {
+              user: @user,
+              password: @password,
+              scheme: @scheme
+            },
+            compression: compress_connection,
+          }), &adapter_conf)
+        Elasticsearch::Client.new transport: transport
+      end
     end
-  end
 
- … (18 lines not captured)
+    def get_connection_options(con_host)
+      raise "`password` must be present if `user` is present" if @user && !@password
+
+      hosts = if con_host || @hosts
+        (con_host || @hosts).split(',').map do |host_str|
+          # Support legacy hosts format host:port,host:port,host:port...
+          if host_str.match(%r{^[^:]+(\:\d+)?$})
+            {
+              host: host_str.split(':')[0],
+              port: (host_str.split(':')[1] || @port).to_i,
+              scheme: @scheme.to_s
+            }
+          else
+            # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
+            uri = URI(get_escaped_userinfo(host_str))
+            %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
+              hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
+              hash
+            end
             end
-    end
- … (6 lines not captured)
+        end.compact
+      else
+        [{host: @host, port: @port.to_i, scheme: @scheme.to_s}]
+      end.each do |host|
+        host.merge!(user: @user, password: @password) if !host[:user] && @user
+        host.merge!(path: @path) if !host[:path] && @path
+      end
+
+      {
+        hosts: hosts
+      }
     end
 
- …
-    hosts
- … (2 lines not captured)
+    def connection_options_description(host)
+      get_connection_options(host)[:hosts].map do |host_info|
+        attributes = host_info.dup
+        attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
+        attributes.inspect
+      end.join(', ')
+    end
 
-  def connection_options_description(host)
-    get_connection_options(host)[:hosts].map do |host_info|
-      attributes = host_info.dup
-      attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
-      attributes.inspect
-    end.join(', ')
-  end
+    def multi_workers_ready?
+      true
+    end
 
-  def write(chunk)
-    bulk_message = Hash.new { |h,k| h[k] = '' }
-    dynamic_conf = @dynamic_config.clone
+    def write(chunk)
+      bulk_message = Hash.new { |h,k| h[k] = '' }
+      dynamic_conf = @dynamic_config.clone
 
- … (6 lines not captured)
+      headers = {
+        UPDATE_OP => {},
+        UPSERT_OP => {},
+        CREATE_OP => {},
+        INDEX_OP => {}
+      }
 
-    chunk.msgpack_each do |tag, time, record|
-      next unless record.is_a? Hash
+      tag = chunk.metadata.tag
 
-      # evaluate all configurations here
-      DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
-        k = DYNAMIC_PARAM_NAMES[i]
-        v = self.instance_variable_get(var)
-        # check here to determine if we should evaluate
-        if dynamic_conf[k] != v
-          value = expand_param(v, tag, time, record)
-          dynamic_conf[k] = value
-        end
-      }
-      # end eval all configs
+      chunk.msgpack_each do |time, record|
+        next unless record.is_a? Hash
 
-      if eval_or_val(dynamic_conf['logstash_format'])
-        if record.has_key?("@timestamp")
-          time = Time.parse record["@timestamp"]
-        elsif record.has_key?(dynamic_conf['time_key'])
-          time = Time.parse record[dynamic_conf['time_key']]
-          record['@timestamp'] = record[dynamic_conf['time_key']] unless time_key_exclude_timestamp
-        else
-          record.merge!({"@timestamp" => Time.at(time).to_datetime.to_s})
+        if @flatten_hashes
+          record = flatten_record(record)
         end
 
-        if eval_or_val(dynamic_conf['utc_index'])
-          target_index = "#{dynamic_conf['logstash_prefix']}-#{Time.at(time).getutc.strftime("#{dynamic_conf['logstash_dateformat']}")}"
-        else
-          target_index = "#{dynamic_conf['logstash_prefix']}-#{Time.at(time).strftime("#{dynamic_conf['logstash_dateformat']}")}"
+        begin
+          # evaluate all configurations here
+          DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
+            k = DYNAMIC_PARAM_NAMES[i]
+            v = self.instance_variable_get(var)
+            # check here to determine if we should evaluate
+            if dynamic_conf[k] != v
+              value = expand_param(v, tag, time, record)
+              dynamic_conf[k] = value
+            end
+          }
+          # end eval all configs
+        rescue => e
+          # handle dynamic parameters misconfigurations
+          router.emit_error_event(tag, time, record, e)
+          next
        end
-      else
-        target_index = dynamic_conf['index_name']
-      end
 
-      # Change target_index to lower-case since Elasticsearch doesn't
-      # allow upper-case characters in index names.
-      target_index = target_index.downcase
+        if eval_or_val(dynamic_conf['logstash_format']) || eval_or_val(dynamic_conf['include_timestamp'])
+          if record.has_key?("@timestamp")
+            time = Time.parse record["@timestamp"]
+          elsif record.has_key?(dynamic_conf['time_key'])
+            time = Time.parse record[dynamic_conf['time_key']]
+            record['@timestamp'] = record[dynamic_conf['time_key']] unless time_key_exclude_timestamp
+          else
+            record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
+          end
+        end
 
-      if @include_tag_key
-        record.merge!(dynamic_conf['tag_key'] => tag)
-      end
+        if eval_or_val(dynamic_conf['logstash_format'])
+          if eval_or_val(dynamic_conf['utc_index'])
+            target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).getutc.strftime("#{dynamic_conf['logstash_dateformat']}")}"
+          else
+            target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).strftime("#{dynamic_conf['logstash_dateformat']}")}"
+          end
+        else
+          target_index = dynamic_conf['index_name']
+        end
 
-      meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
+        # Change target_index to lower-case since Elasticsearch doesn't
+        # allow upper-case characters in index names.
+        target_index = target_index.downcase
 
-      @meta_config_map.each_pair do |config_name, meta_key|
-        if dynamic_conf[config_name] && record[dynamic_conf[config_name]]
-          meta[meta_key] = record[dynamic_conf[config_name]]
+        if @include_tag_key
+          record.merge!(dynamic_conf['tag_key'] => tag)
         end
-      end
 
-      if dynamic_conf['hosts']
-        host = dynamic_conf['hosts']
-      else
-        host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
-      end
+        if dynamic_conf['hosts']
+          host = dynamic_conf['hosts']
+        else
+          host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
+        end
 
-      if @remove_keys
-        @remove_keys.each { |key| record.delete(key) }
-      end
+        if @include_index_in_url
+          key = RequestInfo.new(host, target_index)
+          meta = {"_type" => dynamic_conf['type_name']}
+        else
+          key = RequestInfo.new(host, nil)
+          meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
+        end
 
- … (3 lines not captured)
+        @meta_config_map.each_pair do |config_name, meta_key|
+          if dynamic_conf[config_name] && accessor = record_accessor_create(dynamic_conf[config_name])
+            if raw_value = accessor.call(record)
+              meta[meta_key] = raw_value
+            end
+          end
+        end
 
- … (3 lines not captured)
-    end
-  end
+        if @remove_keys
+          @remove_keys.each { |key| record.delete(key) }
+        end
 
-  def send_bulk(data, host)
-    retries = 0
-    begin
-      response = client(host).bulk body: data
-      if response['errors']
-        log.error "Could not push log to Elasticsearch: #{response}"
+        write_op = dynamic_conf["write_operation"]
+        append_record_to_messages(write_op, meta, headers[write_op], record, bulk_message[key])
       end
-    rescue *client(host).transport.host_unreachable_exceptions => e
-      if retries < 2
-        retries += 1
-        @_es = nil
-        log.warn "Could not push logs to Elasticsearch, resetting connection and trying again. #{e.message}"
-        sleep 2**retries
-        retry
+
+      bulk_message.each do |info, msgs|
+        send_bulk(msgs, info.host, info.index) unless msgs.empty?
+        msgs.clear
       end
-      raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
-    rescue Exception
-      @_es = nil if @reconnect_on_error
-      raise
     end
-  end
 
-  def eval_or_val(var)
-    return var unless var.is_a?(String)
-    eval(var)
-  end
-
-  def expand_param(param, tag, time, record)
-    # check for '${ ... }'
-    # yes => `eval`
-    # no => return param
-    return param if (param =~ /\${.+}/).nil?
-
-    # check for 'tag_parts[]'
-    # separated by a delimiter (default '.')
-    tag_parts = tag.split(@delimiter) unless (param =~ /tag_parts\[.+\]/).nil? || tag.nil?
-
-    # pull out section between ${} then eval
-    inner = param.clone
-    while inner.match(/\${.+}/)
-      to_eval = inner.match(/\${(.+?)}/){$1}
-
-      if !(to_eval =~ /record\[.+\]/).nil? && record.nil?
-        return to_eval
-      elsif !(to_eval =~/tag_parts\[.+\]/).nil? && tag_parts.nil?
-        return to_eval
-      elsif !(to_eval =~/time/).nil? && time.nil?
-        return to_eval
-      else
-        inner.sub!(/\${.+?}/, eval( to_eval ))
+    def send_bulk(data, host, index)
+      begin
+        prepared_data = if compression
+                          gzip(data)
+                        else
+                          data
+                        end
+        response = client(host, compression).bulk body: prepared_data, index: index
+        if response['errors']
+          log.error "Could not push log to Elasticsearch: #{response}"
+        end
+      rescue => e
+        @_es = nil if @reconnect_on_error
+        # FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
+        raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description(host)}): #{e.message}"
       end
     end
-    inner
-  end
-
-  def is_valid_expand_param_type(param)
-    return false if [:@buffer_type].include?(param)
-    return self.instance_variable_get(param).is_a?(String)
-  end
 
-  def is_existing_connection(hosts)
-    return false if @_es.nil?
-    return false if @current_config.nil?
-    return false if hosts.length != @current_config.length
+    def eval_or_val(var)
+      return var unless var.is_a?(String)
+      eval(var)
+    end
 
-    for i in 0...hosts.length
-      if !hosts[i][:host].eql? @current_config[i][:host] || hosts[i][:port] != @current_config[i][:port]
-        return false
+    def expand_param(param, tag, time, record)
+      # check for '${ ... }'
+      # yes => `eval`
+      # no => return param
+      return param if (param.to_s =~ /\${.+}/).nil?
+
+      # check for 'tag_parts[]'
+      # separated by a delimiter (default '.')
+      tag_parts = tag.split(@delimiter) unless (param =~ /tag_parts\[.+\]/).nil? || tag.nil?
+
+      # pull out section between ${} then eval
+      inner = param.clone
+      while inner.match(/\${.+}/)
+        to_eval = inner.match(/\${(.+?)}/){$1}
+
+        if !(to_eval =~ /record\[.+\]/).nil? && record.nil?
+          return to_eval
+        elsif !(to_eval =~/tag_parts\[.+\]/).nil? && tag_parts.nil?
+          return to_eval
+        elsif !(to_eval =~/time/).nil? && time.nil?
+          return to_eval
+        else
+          inner.sub!(/\${.+?}/, eval( to_eval ))
+        end
       end
+      inner
     end
 
-    return true
+    def is_valid_expand_param_type(param)
+      return false if [:@buffer_type].include?(param)
+      return self.instance_variable_get(param).is_a?(String)
+    end
   end
 end
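Usage note: in elasticsearch_dynamic, the configuration values listed in DYNAMIC_PARAM_NAMES may embed ${...} Ruby expressions, which expand_param evaluates per event; tag_parts is the event tag split on the delimiter setting (default "."). A minimal, illustrative configuration (the tag layout and index prefix here are hypothetical, not from the diff):

<match app.**>
  @type elasticsearch_dynamic
  host localhost
  port 9200
  # For an event tagged "app.web", ${tag_parts[1]} evaluates to "web",
  # so the record is indexed into "logs-web" (the plugin downcases the
  # final index name before sending the bulk request).
  index_name logs-${tag_parts[1]}
</match>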