fluent-plugin-elasticsearch 1.9.4 → 5.0.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. checksums.yaml +5 -5
  2. data/.github/ISSUE_TEMPLATE/bug_report.md +37 -0
  3. data/.github/ISSUE_TEMPLATE/feature_request.md +24 -0
  4. data/.github/workflows/issue-auto-closer.yml +12 -0
  5. data/.github/workflows/linux.yml +26 -0
  6. data/.github/workflows/macos.yml +26 -0
  7. data/.github/workflows/windows.yml +26 -0
  8. data/.travis.yml +33 -6
  9. data/CONTRIBUTING.md +24 -0
  10. data/Gemfile +4 -1
  11. data/History.md +445 -1
  12. data/ISSUE_TEMPLATE.md +19 -0
  13. data/README.ElasticsearchGenID.md +116 -0
  14. data/README.ElasticsearchInput.md +293 -0
  15. data/README.Troubleshooting.md +692 -0
  16. data/README.md +1013 -38
  17. data/appveyor.yml +20 -0
  18. data/fluent-plugin-elasticsearch.gemspec +15 -9
  19. data/{Gemfile.v0.12 → gemfiles/Gemfile.elasticsearch.v6} +6 -5
  20. data/lib/fluent/log-ext.rb +38 -0
  21. data/lib/fluent/plugin/default-ilm-policy.json +14 -0
  22. data/lib/fluent/plugin/elasticsearch_constants.rb +13 -0
  23. data/lib/fluent/plugin/elasticsearch_error.rb +5 -0
  24. data/lib/fluent/plugin/elasticsearch_error_handler.rb +129 -0
  25. data/lib/fluent/plugin/elasticsearch_fallback_selector.rb +9 -0
  26. data/lib/fluent/plugin/elasticsearch_index_lifecycle_management.rb +67 -0
  27. data/lib/fluent/plugin/elasticsearch_index_template.rb +186 -12
  28. data/lib/fluent/plugin/elasticsearch_simple_sniffer.rb +10 -0
  29. data/lib/fluent/plugin/elasticsearch_tls.rb +70 -0
  30. data/lib/fluent/plugin/filter_elasticsearch_genid.rb +77 -0
  31. data/lib/fluent/plugin/in_elasticsearch.rb +325 -0
  32. data/lib/fluent/plugin/oj_serializer.rb +22 -0
  33. data/lib/fluent/plugin/out_elasticsearch.rb +1008 -267
  34. data/lib/fluent/plugin/out_elasticsearch_data_stream.rb +218 -0
  35. data/lib/fluent/plugin/out_elasticsearch_dynamic.rb +232 -214
  36. data/test/plugin/test_alias_template.json +9 -0
  37. data/test/plugin/test_elasticsearch_error_handler.rb +646 -0
  38. data/test/plugin/test_elasticsearch_fallback_selector.rb +74 -0
  39. data/test/plugin/test_elasticsearch_index_lifecycle_management.rb +66 -0
  40. data/test/plugin/test_elasticsearch_tls.rb +145 -0
  41. data/test/plugin/test_filter_elasticsearch_genid.rb +215 -0
  42. data/test/plugin/test_in_elasticsearch.rb +459 -0
  43. data/test/plugin/test_index_alias_template.json +11 -0
  44. data/test/plugin/test_index_template.json +25 -0
  45. data/test/plugin/test_oj_serializer.rb +19 -0
  46. data/test/plugin/test_out_elasticsearch.rb +5029 -387
  47. data/test/plugin/test_out_elasticsearch_data_stream.rb +337 -0
  48. data/test/plugin/test_out_elasticsearch_dynamic.rb +681 -208
  49. data/test/test_log-ext.rb +35 -0
  50. metadata +97 -19
data/lib/fluent/plugin/out_elasticsearch_dynamic.rb
@@ -1,264 +1,282 @@
  # encoding: UTF-8
  require_relative 'out_elasticsearch'

- class Fluent::ElasticsearchOutputDynamic < Fluent::ElasticsearchOutput
+ module Fluent::Plugin
+ class ElasticsearchOutputDynamic < ElasticsearchOutput

- Fluent::Plugin.register_output('elasticsearch_dynamic', self)
+ Fluent::Plugin.register_output('elasticsearch_dynamic', self)

- config_param :delimiter, :string, :default => "."
+ helpers :event_emitter

- DYNAMIC_PARAM_NAMES = %W[hosts host port logstash_format logstash_prefix logstash_dateformat time_key utc_index index_name tag_key type_name id_key parent_key routing_key write_operation]
- DYNAMIC_PARAM_SYMBOLS = DYNAMIC_PARAM_NAMES.map { |n| "@#{n}".to_sym }
+ config_param :delimiter, :string, :default => "."

- attr_reader :dynamic_config
+ DYNAMIC_PARAM_NAMES = %W[hosts host port include_timestamp logstash_format logstash_prefix logstash_dateformat time_key utc_index index_name tag_key type_name id_key parent_key routing_key write_operation]
+ DYNAMIC_PARAM_SYMBOLS = DYNAMIC_PARAM_NAMES.map { |n| "@#{n}".to_sym }

- def configure(conf)
- super
+ RequestInfo = Struct.new(:host, :index)

- # evaluate all configurations here
- @dynamic_config = {}
- DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
- value = expand_param(self.instance_variable_get(var), nil, nil, nil)
- key = DYNAMIC_PARAM_NAMES[i]
- @dynamic_config[key] = value.to_s
- }
- # end eval all configs
- @current_config = nil
- end
+ attr_reader :dynamic_config

- def create_meta_config_map
- {'id_key' => '_id', 'parent_key' => '_parent', 'routing_key' => '_routing'}
- end
+ def configure(conf)
+ super

- def client(host)
-
- # check here to see if we already have a client connection for the given host
- connection_options = get_connection_options(host)
-
- @_es = nil unless is_existing_connection(connection_options[:hosts])
-
- @_es ||= begin
- @current_config = connection_options[:hosts].clone
- excon_options = { client_key: @client_key, client_cert: @client_cert, client_key_pass: @client_key_pass }
- adapter_conf = lambda {|f| f.adapter :excon, excon_options }
- transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
- options: {
- reload_connections: @reload_connections,
- reload_on_failure: @reload_on_failure,
- resurrect_after: @resurrect_after,
- retry_on_failure: 5,
- transport_options: {
- headers: { 'Content-Type' => 'application/json' },
- request: { timeout: @request_timeout },
- ssl: { verify: @ssl_verify, ca_file: @ca_file }
- }
- }), &adapter_conf)
- es = Elasticsearch::Client.new transport: transport
+ # evaluate all configurations here
+ @dynamic_config = {}
+ DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
+ value = expand_param(self.instance_variable_get(var), nil, nil, nil)
+ key = DYNAMIC_PARAM_NAMES[i]
+ @dynamic_config[key] = value.to_s
+ }
+ # end eval all configs
+ end
+
+ def create_meta_config_map
+ {'id_key' => '_id', 'parent_key' => '_parent', 'routing_key' => @routing_key_name}
+ end

- begin
- raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description(host)})!" unless es.ping
- rescue *es.transport.host_unreachable_exceptions => e
- raise ConnectionFailure, "Can not reach Elasticsearch cluster (#{connection_options_description(host)})! #{e.message}"
- end

- log.info "Connection opened to Elasticsearch cluster => #{connection_options_description(host)}"
- es
+ def client(host = nil, compress_connection = false)
+ # check here to see if we already have a client connection for the given host
+ connection_options = get_connection_options(host)
+
+ @_es = nil unless is_existing_connection(connection_options[:hosts])
+ @_es = nil unless @compressable_connection == compress_connection
+
+ @_es ||= begin
+ @compressable_connection = compress_connection
+ @current_config = connection_options[:hosts].clone
+ adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
+ gzip_headers = if compress_connection
+ {'Content-Encoding' => 'gzip'}
+ else
+ {}
+ end
+ headers = { 'Content-Type' => @content_type.to_s, }.merge(gzip_headers)
+ ssl_options = { verify: @ssl_verify, ca_file: @ca_file}.merge(@ssl_version_options)
+ transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
+ options: {
+ reload_connections: @reload_connections,
+ reload_on_failure: @reload_on_failure,
+ resurrect_after: @resurrect_after,
+ logger: @transport_logger,
+ transport_options: {
+ headers: headers,
+ request: { timeout: @request_timeout },
+ ssl: ssl_options,
+ },
+ http: {
+ user: @user,
+ password: @password,
+ scheme: @scheme
+ },
+ compression: compress_connection,
+ }), &adapter_conf)
+ Elasticsearch::Client.new transport: transport
+ end
  end
- end

- def get_connection_options(con_host)
- raise "`password` must be present if `user` is present" if @user && !@password
-
- hosts = if con_host || @hosts
- (con_host || @hosts).split(',').map do |host_str|
- # Support legacy hosts format host:port,host:port,host:port...
- if host_str.match(%r{^[^:]+(\:\d+)?$})
- {
- host: host_str.split(':')[0],
- port: (host_str.split(':')[1] || @port).to_i,
- scheme: @scheme
- }
- else
- # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
- uri = URI(host_str)
- %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
- hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
- hash
+ def get_connection_options(con_host)
+ raise "`password` must be present if `user` is present" if @user && !@password
+
+ hosts = if con_host || @hosts
+ (con_host || @hosts).split(',').map do |host_str|
+ # Support legacy hosts format host:port,host:port,host:port...
+ if host_str.match(%r{^[^:]+(\:\d+)?$})
+ {
+ host: host_str.split(':')[0],
+ port: (host_str.split(':')[1] || @port).to_i,
+ scheme: @scheme.to_s
+ }
+ else
+ # New hosts format expects URLs such as http://logs.foo.com,https://john:pass@logs2.foo.com/elastic
+ uri = URI(get_escaped_userinfo(host_str))
+ %w(user password path).inject(host: uri.host, port: uri.port, scheme: uri.scheme) do |hash, key|
+ hash[key.to_sym] = uri.public_send(key) unless uri.public_send(key).nil? || uri.public_send(key) == ''
+ hash
+ end
  end
- end
- end.compact
- else
- [{host: @host, port: @port.to_i, scheme: @scheme}]
- end.each do |host|
- host.merge!(user: @user, password: @password) if !host[:user] && @user
- host.merge!(path: @path) if !host[:path] && @path
+ end.compact
+ else
+ [{host: @host, port: @port.to_i, scheme: @scheme.to_s}]
+ end.each do |host|
+ host.merge!(user: @user, password: @password) if !host[:user] && @user
+ host.merge!(path: @path) if !host[:path] && @path
+ end
+
+ {
+ hosts: hosts
+ }
  end

- {
- hosts: hosts
- }
- end
+ def connection_options_description(host)
+ get_connection_options(host)[:hosts].map do |host_info|
+ attributes = host_info.dup
+ attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
+ attributes.inspect
+ end.join(', ')
+ end

- def connection_options_description(host)
- get_connection_options(host)[:hosts].map do |host_info|
- attributes = host_info.dup
- attributes[:password] = 'obfuscated' if attributes.has_key?(:password)
- attributes.inspect
- end.join(', ')
- end
+ def multi_workers_ready?
+ true
+ end

- def write_objects(tag, chunk)
- bulk_message = Hash.new { |h,k| h[k] = '' }
- dynamic_conf = @dynamic_config.clone
+ def write(chunk)
+ bulk_message = Hash.new { |h,k| h[k] = '' }
+ dynamic_conf = @dynamic_config.clone

- headers = {
- UPDATE_OP => {},
- UPSERT_OP => {},
- CREATE_OP => {},
- INDEX_OP => {}
- }
+ headers = {
+ UPDATE_OP => {},
+ UPSERT_OP => {},
+ CREATE_OP => {},
+ INDEX_OP => {}
+ }

- chunk.msgpack_each do |time, record|
- next unless record.is_a? Hash
+ tag = chunk.metadata.tag

- # evaluate all configurations here
- DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
- k = DYNAMIC_PARAM_NAMES[i]
- v = self.instance_variable_get(var)
- # check here to determine if we should evaluate
- if dynamic_conf[k] != v
- value = expand_param(v, tag, time, record)
- dynamic_conf[k] = value
- end
- }
- # end eval all configs
+ chunk.msgpack_each do |time, record|
+ next unless record.is_a? Hash

- if eval_or_val(dynamic_conf['logstash_format'])
- if record.has_key?("@timestamp")
- time = Time.parse record["@timestamp"]
- elsif record.has_key?(dynamic_conf['time_key'])
- time = Time.parse record[dynamic_conf['time_key']]
- record['@timestamp'] = record[dynamic_conf['time_key']] unless time_key_exclude_timestamp
- else
- record.merge!({"@timestamp" => Time.at(time).to_datetime.to_s})
+ if @flatten_hashes
+ record = flatten_record(record)
  end

- if eval_or_val(dynamic_conf['utc_index'])
- target_index = "#{dynamic_conf['logstash_prefix']}-#{Time.at(time).getutc.strftime("#{dynamic_conf['logstash_dateformat']}")}"
- else
- target_index = "#{dynamic_conf['logstash_prefix']}-#{Time.at(time).strftime("#{dynamic_conf['logstash_dateformat']}")}"
+ begin
+ # evaluate all configurations here
+ DYNAMIC_PARAM_SYMBOLS.each_with_index { |var, i|
+ k = DYNAMIC_PARAM_NAMES[i]
+ v = self.instance_variable_get(var)
+ # check here to determine if we should evaluate
+ if dynamic_conf[k] != v
+ value = expand_param(v, tag, time, record)
+ dynamic_conf[k] = value
+ end
+ }
+ # end eval all configs
+ rescue => e
+ # handle dynamic parameters misconfigurations
+ router.emit_error_event(tag, time, record, e)
+ next
  end
- else
- target_index = dynamic_conf['index_name']
- end

- # Change target_index to lower-case since Elasticsearch doesn't
- # allow upper-case characters in index names.
- target_index = target_index.downcase
+ if eval_or_val(dynamic_conf['logstash_format']) || eval_or_val(dynamic_conf['include_timestamp'])
+ if record.has_key?("@timestamp")
+ time = Time.parse record["@timestamp"]
+ elsif record.has_key?(dynamic_conf['time_key'])
+ time = Time.parse record[dynamic_conf['time_key']]
+ record['@timestamp'] = record[dynamic_conf['time_key']] unless time_key_exclude_timestamp
+ else
+ record.merge!({"@timestamp" => Time.at(time).iso8601(@time_precision)})
+ end
+ end

- if @include_tag_key
- record.merge!(dynamic_conf['tag_key'] => tag)
- end
+ if eval_or_val(dynamic_conf['logstash_format'])
+ if eval_or_val(dynamic_conf['utc_index'])
+ target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).getutc.strftime("#{dynamic_conf['logstash_dateformat']}")}"
+ else
+ target_index = "#{dynamic_conf['logstash_prefix']}#{@logstash_prefix_separator}#{Time.at(time).strftime("#{dynamic_conf['logstash_dateformat']}")}"
+ end
+ else
+ target_index = dynamic_conf['index_name']
+ end

- meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
+ # Change target_index to lower-case since Elasticsearch doesn't
+ # allow upper-case characters in index names.
+ target_index = target_index.downcase

- @meta_config_map.each_pair do |config_name, meta_key|
- if dynamic_conf[config_name] && record[dynamic_conf[config_name]]
- meta[meta_key] = record[dynamic_conf[config_name]]
+ if @include_tag_key
+ record.merge!(dynamic_conf['tag_key'] => tag)
  end
- end

- if dynamic_conf['hosts']
- host = dynamic_conf['hosts']
- else
- host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
- end
+ if dynamic_conf['hosts']
+ host = dynamic_conf['hosts']
+ else
+ host = "#{dynamic_conf['host']}:#{dynamic_conf['port']}"
+ end

- if @remove_keys
- @remove_keys.each { |key| record.delete(key) }
- end
+ if @include_index_in_url
+ key = RequestInfo.new(host, target_index)
+ meta = {"_type" => dynamic_conf['type_name']}
+ else
+ key = RequestInfo.new(host, nil)
+ meta = {"_index" => target_index, "_type" => dynamic_conf['type_name']}
+ end

- write_op = dynamic_conf["write_operation"]
- append_record_to_messages(write_op, meta, headers[write_op], record, bulk_message[host])
- end
+ @meta_config_map.each_pair do |config_name, meta_key|
+ if dynamic_conf[config_name] && accessor = record_accessor_create(dynamic_conf[config_name])
+ if raw_value = accessor.call(record)
+ meta[meta_key] = raw_value
+ end
+ end
+ end

- bulk_message.each do |hKey, msgs|
- send_bulk(msgs, hKey) unless msgs.empty?
- msgs.clear
- end
- end
+ if @remove_keys
+ @remove_keys.each { |key| record.delete(key) }
+ end

- def send_bulk(data, host)
- retries = 0
- begin
- response = client(host).bulk body: data
- if response['errors']
- log.error "Could not push log to Elasticsearch: #{response}"
+ write_op = dynamic_conf["write_operation"]
+ append_record_to_messages(write_op, meta, headers[write_op], record, bulk_message[key])
  end
- rescue *client(host).transport.host_unreachable_exceptions => e
- if retries < 2
- retries += 1
- @_es = nil
- log.warn "Could not push logs to Elasticsearch, resetting connection and trying again. #{e.message}"
- sleep 2**retries
- retry
+
+ bulk_message.each do |info, msgs|
+ send_bulk(msgs, info.host, info.index) unless msgs.empty?
+ msgs.clear
  end
- raise ConnectionFailure, "Could not push logs to Elasticsearch after #{retries} retries. #{e.message}"
- rescue Exception
- @_es = nil if @reconnect_on_error
- raise
  end
- end

- def eval_or_val(var)
- return var unless var.is_a?(String)
- eval(var)
- end
-
- def expand_param(param, tag, time, record)
- # check for '${ ... }'
- # yes => `eval`
- # no => return param
- return param if (param =~ /\${.+}/).nil?
-
- # check for 'tag_parts[]'
- # separated by a delimiter (default '.')
- tag_parts = tag.split(@delimiter) unless (param =~ /tag_parts\[.+\]/).nil? || tag.nil?
-
- # pull out section between ${} then eval
- inner = param.clone
- while inner.match(/\${.+}/)
- to_eval = inner.match(/\${(.+?)}/){$1}
-
- if !(to_eval =~ /record\[.+\]/).nil? && record.nil?
- return to_eval
- elsif !(to_eval =~/tag_parts\[.+\]/).nil? && tag_parts.nil?
- return to_eval
- elsif !(to_eval =~/time/).nil? && time.nil?
- return to_eval
- else
- inner.sub!(/\${.+?}/, eval( to_eval ))
+ def send_bulk(data, host, index)
+ begin
+ prepared_data = if compression
+ gzip(data)
+ else
+ data
+ end
+ response = client(host, compression).bulk body: prepared_data, index: index
+ if response['errors']
+ log.error "Could not push log to Elasticsearch: #{response}"
+ end
+ rescue => e
+ @_es = nil if @reconnect_on_error
+ # FIXME: identify unrecoverable errors and raise UnrecoverableRequestFailure instead
+ raise RecoverableRequestFailure, "could not push logs to Elasticsearch cluster (#{connection_options_description(host)}): #{e.message}"
  end
  end
- inner
- end
-
- def is_valid_expand_param_type(param)
- return false if [:@buffer_type].include?(param)
- return self.instance_variable_get(param).is_a?(String)
- end

- def is_existing_connection(host)
- # check if the host provided match the current connection
- return false if @_es.nil?
- return false if host.length != @current_config.length
+ def eval_or_val(var)
+ return var unless var.is_a?(String)
+ eval(var)
+ end

- for i in 0...host.length
- if !host[i][:host].eql? @current_config[i][:host] || host[i][:port] != @current_config[i][:port]
- return false
+ def expand_param(param, tag, time, record)
+ # check for '${ ... }'
+ # yes => `eval`
+ # no => return param
+ return param if (param.to_s =~ /\${.+}/).nil?
+
+ # check for 'tag_parts[]'
+ # separated by a delimiter (default '.')
+ tag_parts = tag.split(@delimiter) unless (param =~ /tag_parts\[.+\]/).nil? || tag.nil?
+
+ # pull out section between ${} then eval
+ inner = param.clone
+ while inner.match(/\${.+}/)
+ to_eval = inner.match(/\${(.+?)}/){$1}
+
+ if !(to_eval =~ /record\[.+\]/).nil? && record.nil?
+ return to_eval
+ elsif !(to_eval =~/tag_parts\[.+\]/).nil? && tag_parts.nil?
+ return to_eval
+ elsif !(to_eval =~/time/).nil? && time.nil?
+ return to_eval
+ else
+ inner.sub!(/\${.+?}/, eval( to_eval ))
+ end
  end
+ inner
  end

- return true
+ def is_valid_expand_param_type(param)
+ return false if [:@buffer_type].include?(param)
+ return self.instance_variable_get(param).is_a?(String)
+ end
  end
  end
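
For context, the expand_param method shown above performs eval-based "${...}" substitution against the event's tag, time, and record. The following standalone Ruby sketch illustrates that resolution mechanism only; the tag, record, and parameter values are made-up examples, and this is not the plugin's API:

# Illustrative only: how a "${...}" placeholder resolves against a tag and record.
tag       = "app.web.access"
tag_parts = tag.split(".")                 # what the plugin derives from the event tag
record    = { "user" => "alice" }          # a hypothetical event record
param     = "logs-${tag_parts[1]}-${record['user']}"

# Replace each ${...} section with the result of evaluating its contents,
# mirroring the eval-based loop in expand_param above.
expanded = param.gsub(/\$\{(.+?)\}/) { eval(Regexp.last_match(1)) }
puts expanded  # => "logs-web-alice"
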
data/test/plugin/test_alias_template.json
@@ -0,0 +1,9 @@
+ {
+ "order": 5,
+ "template": "--index_prefix-----appid---*",
+ "settings": {},
+ "mappings": {},
+ "aliases": {
+ "--appid---alias": {}
+ }
+ }
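
As a rough illustration of how an index template with aliases like the fixture above can be registered, the sketch below uses the elasticsearch-ruby client directly. The local URL and template name are assumptions, and the "--index_prefix--"/"--appid--" strings are presumably placeholders substituted by the test suite before use, so this is only a hedged example, not the plugin's own behavior:

# Hypothetical manual registration of a legacy index template with an alias.
require 'elasticsearch'
require 'json'

client   = Elasticsearch::Client.new(url: 'http://localhost:9200')   # assumed local cluster
template = JSON.parse(File.read('data/test/plugin/test_alias_template.json'))
client.indices.put_template(name: 'example-alias-template', body: template)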