logstash-output-opensearch 1.0.0-java

Sign up to get free protection for your applications and to get access to all the features.
Files changed (61) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +0 -0
  3. data.tar.gz.sig +0 -0
  4. data/ADMINS.md +29 -0
  5. data/CODE_OF_CONDUCT.md +25 -0
  6. data/CONTRIBUTING.md +99 -0
  7. data/DEVELOPER_GUIDE.md +208 -0
  8. data/Gemfile +20 -0
  9. data/LICENSE +202 -0
  10. data/MAINTAINERS.md +71 -0
  11. data/NOTICE +2 -0
  12. data/README.md +37 -0
  13. data/RELEASING.md +36 -0
  14. data/SECURITY.md +3 -0
  15. data/lib/logstash/outputs/opensearch.rb +449 -0
  16. data/lib/logstash/outputs/opensearch/distribution_checker.rb +44 -0
  17. data/lib/logstash/outputs/opensearch/http_client.rb +465 -0
  18. data/lib/logstash/outputs/opensearch/http_client/manticore_adapter.rb +140 -0
  19. data/lib/logstash/outputs/opensearch/http_client/pool.rb +467 -0
  20. data/lib/logstash/outputs/opensearch/http_client_builder.rb +182 -0
  21. data/lib/logstash/outputs/opensearch/template_manager.rb +60 -0
  22. data/lib/logstash/outputs/opensearch/templates/ecs-disabled/1x.json +44 -0
  23. data/lib/logstash/outputs/opensearch/templates/ecs-disabled/7x.json +44 -0
  24. data/lib/logstash/plugin_mixins/opensearch/api_configs.rb +168 -0
  25. data/lib/logstash/plugin_mixins/opensearch/common.rb +294 -0
  26. data/lib/logstash/plugin_mixins/opensearch/noop_distribution_checker.rb +18 -0
  27. data/logstash-output-opensearch.gemspec +40 -0
  28. data/spec/fixtures/_nodes/nodes.json +74 -0
  29. data/spec/fixtures/htpasswd +2 -0
  30. data/spec/fixtures/nginx_reverse_proxy.conf +22 -0
  31. data/spec/fixtures/scripts/painless/scripted_update.painless +2 -0
  32. data/spec/fixtures/scripts/painless/scripted_update_nested.painless +1 -0
  33. data/spec/fixtures/scripts/painless/scripted_upsert.painless +1 -0
  34. data/spec/integration/outputs/compressed_indexing_spec.rb +76 -0
  35. data/spec/integration/outputs/create_spec.rb +76 -0
  36. data/spec/integration/outputs/delete_spec.rb +72 -0
  37. data/spec/integration/outputs/index_spec.rb +164 -0
  38. data/spec/integration/outputs/index_version_spec.rb +110 -0
  39. data/spec/integration/outputs/ingest_pipeline_spec.rb +82 -0
  40. data/spec/integration/outputs/metrics_spec.rb +75 -0
  41. data/spec/integration/outputs/no_opensearch_on_startup_spec.rb +67 -0
  42. data/spec/integration/outputs/painless_update_spec.rb +147 -0
  43. data/spec/integration/outputs/parent_spec.rb +103 -0
  44. data/spec/integration/outputs/retry_spec.rb +182 -0
  45. data/spec/integration/outputs/routing_spec.rb +70 -0
  46. data/spec/integration/outputs/sniffer_spec.rb +70 -0
  47. data/spec/integration/outputs/templates_spec.rb +105 -0
  48. data/spec/integration/outputs/update_spec.rb +123 -0
  49. data/spec/opensearch_spec_helper.rb +141 -0
  50. data/spec/spec_helper.rb +19 -0
  51. data/spec/unit/http_client_builder_spec.rb +194 -0
  52. data/spec/unit/outputs/error_whitelist_spec.rb +62 -0
  53. data/spec/unit/outputs/opensearch/http_client/manticore_adapter_spec.rb +159 -0
  54. data/spec/unit/outputs/opensearch/http_client/pool_spec.rb +306 -0
  55. data/spec/unit/outputs/opensearch/http_client_spec.rb +292 -0
  56. data/spec/unit/outputs/opensearch/template_manager_spec.rb +36 -0
  57. data/spec/unit/outputs/opensearch_proxy_spec.rb +112 -0
  58. data/spec/unit/outputs/opensearch_spec.rb +800 -0
  59. data/spec/unit/outputs/opensearch_ssl_spec.rb +179 -0
  60. metadata +289 -0
  61. metadata.gz.sig +0 -0
@@ -0,0 +1,44 @@
1
+ # SPDX-License-Identifier: Apache-2.0
2
+ #
3
+ # The OpenSearch Contributors require contributions made to
4
+ # this file be licensed under the Apache-2.0 license or a
5
+ # compatible open source license.
6
+ #
7
+ # Modifications Copyright OpenSearch Contributors. See
8
+ # GitHub history for details.
9
+ #
10
module LogStash; module Outputs; class OpenSearch
  # Validates that the cluster we are connecting to is a distribution this
  # plugin supports before events are shipped to it.
  class DistributionChecker

    # @param logger [Logger] used to report unsupported clusters
    def initialize(logger)
      @logger = logger
    end

    # Checks whether connecting cluster is one of supported distribution or not.
    # 'opensearch' is always supported; 'oss' only at major version 7.
    # @param pool pool used to look up the node's reported distribution
    # @param url [LogStash::Util::SafeURI] OpenSearch node URL
    # @param major_version OpenSearch major version number
    # @return [Boolean] true if supported
    def is_supported?(pool, url, major_version)
      distribution = get_distribution(pool, url)
      supported =
        case distribution
        when 'opensearch' then true
        when 'oss'        then major_version == 7
        else false
        end
      log_not_supported(url, major_version, distribution) unless supported
      supported
    end

    # Asks the pool which distribution the node at `url` reports.
    def get_distribution(pool, url)
      pool.get_distribution(url)
    end

    # Logs an error describing the unsupported cluster.
    def log_not_supported(url, major_version, distribution)
      @logger.error("Could not connect to cluster", url: url.sanitized.to_s, distribution: distribution, major_version: major_version)
    end
  end
end; end; end
@@ -0,0 +1,465 @@
1
+ # SPDX-License-Identifier: Apache-2.0
2
+ #
3
+ # The OpenSearch Contributors require contributions made to
4
+ # this file be licensed under the Apache-2.0 license or a
5
+ # compatible open source license.
6
+ #
7
+ # Modifications Copyright OpenSearch Contributors. See
8
+ # GitHub history for details.
9
+
10
+ require "logstash/outputs/opensearch"
11
+ require 'logstash/outputs/opensearch/http_client/pool'
12
+ require 'logstash/outputs/opensearch/http_client/manticore_adapter'
13
+ require 'cgi'
14
+ require 'zlib'
15
+ require 'stringio'
16
+
17
+ module LogStash; module Outputs; class OpenSearch;
18
+ # This is a constant instead of a config option because
19
+ # there really isn't a good reason to configure it.
20
+ #
21
+ # The criteria used are:
22
+ # 1. We need a number that's less than 100MiB because OpenSearch
23
+ # won't accept bulks larger than that.
24
+ # 2. It must be large enough to amortize the connection constant
25
+ # across multiple requests.
26
+ # 3. It must be small enough that even if multiple threads hit this size
27
+ # we won't use a lot of heap.
28
+ #
29
+ # We wound up agreeing that a number greater than 10 MiB and less than 100MiB
30
+ # made sense. We picked one on the lowish side to not use too much heap.
31
+ TARGET_BULK_BYTES = 20 * 1024 * 1024 # 20MiB
32
+
33
+ class HttpClient
34
+ attr_reader :client, :options, :logger, :pool, :action_count, :recv_count
35
+ # This is here in case we use DEFAULT_OPTIONS in the future
36
+ # DEFAULT_OPTIONS = {
37
+ # :setting => value
38
+ # }
39
+
40
+ #
41
+ # The `options` is a hash where the following symbol keys have meaning:
42
+ #
43
+ # * `:hosts` - array of String. Set a list of hosts to use for communication.
44
+ # * `:port` - number. set the port to use to communicate with OpenSearch
45
+ # * `:user` - String. The user to use for authentication.
46
+ # * `:password` - String. The password to use for authentication.
47
+ # * `:timeout` - Float. A duration value, in seconds, after which a socket
48
+ # operation or request will be aborted if not yet successful
49
+ # * `:client_settings` - a hash; see below for keys.
50
+ #
51
+ # The `client_settings` key is a hash that can contain other settings:
52
+ #
53
+ # * `:ssl` - Boolean. Enable or disable SSL/TLS.
54
+ # * `:proxy` - String. Choose an HTTP proxy to use.
55
+ # * `:path` - String. The leading path for prefixing OpenSearch
56
+ # * `:headers` - Hash. Pairs of headers and their values
57
+ # requests. This is sometimes used if you are proxying OpenSearch access
58
+ # through a special http path, such as using mod_rewrite.
59
# Wires up metrics namespaces, captures the configured options, resolves the
# URL template and starts the connection pool.
# @param options [Hash] see the class-level option documentation above
def initialize(options={})
  @logger = options[:logger]
  @metric = options[:metric]
  @bulk_request_metrics = @metric.namespace(:bulk_requests)
  @bulk_response_metrics = @bulk_request_metrics.namespace(:responses)

  # Again, in case we use DEFAULT_OPTIONS in the future, uncomment this.
  # @options = DEFAULT_OPTIONS.merge(options)
  @options = options

  @url_template = build_url_template

  @pool = build_pool(@options)
  # mutex to prevent requests and sniffing to access the
  # connection pool at the same time
  @bulk_path = @options[:bulk_path]
end
76
+
77
# Template hash describing the "default" URL pieces derived from the
# configuration; host_to_url fills the :host placeholder per endpoint.
def build_url_template
  {
    scheme: scheme,
    user: user,
    password: password,
    host: "URLTEMPLATE",
    port: port,
    path: path
  }
end
87
+
88
# Installs `template` under `name` unless an identical-named template is
# already present. With `force` the existence lookup is skipped entirely —
# we will overwrite anyway, so the extra HTTP round-trip is wasted.
# @param name [String] template name
# @param template [Hash] template body
# @param force [Boolean] overwrite an existing template
def template_install(name, template, force=false)
  if !force && template_exists?(name)
    @logger.debug("Found existing OpenSearch template, skipping template management", name: name)
    return
  end
  template_put(name, template)
end
95
+
96
# Most recently observed cluster version (delegates to the pool).
def last_version
  @pool.last_version
end

# Highest major version seen across connected nodes (delegates to the pool).
def maximum_seen_major_version
  @pool.maximum_seen_major_version
end
103
+
104
# Serializes `actions` into newline-delimited JSON and ships them to the
# bulk endpoint, splitting into multiple requests whenever the next action
# would push the payload past TARGET_BULK_BYTES.
# @param actions [Array] triples of [action_name, args_hash, source]
# @return [Hash] merged bulk response ({"errors" => bool, "items" => [...]}),
#   or nil when `actions` is empty
def bulk(actions)
  @action_count ||= 0
  @action_count += actions.size

  return if actions.empty?

  bulk_actions = actions.map do |action, args, source|
    args, source = update_action_builder(args, source) if action == 'update'

    # Deletes carry no document body; everything else is a header/source pair.
    if source && action != 'delete'
      [{ action => args }, source]
    else
      { action => args }
    end
  end

  body_stream = StringIO.new
  if http_compression
    body_stream.set_encoding "BINARY"
    stream_writer = gzip_writer(body_stream)
  else
    stream_writer = body_stream
  end

  bulk_responses = []
  batch_actions = []
  bulk_actions.each_with_index do |action, index|
    serialized =
      if action.is_a?(Array)
        action.map { |line| LogStash::Json.dump(line) }.join("\n")
      else
        LogStash::Json.dump(action)
      end
    serialized << "\n"

    # Flush the accumulated batch before this action would overflow the
    # target size; never send an empty payload.
    if (stream_writer.pos + serialized.bytesize) > TARGET_BULK_BYTES && stream_writer.pos > 0
      stream_writer.flush # ensure writer has sync'd buffers before reporting sizes
      logger.debug("Sending partial bulk request for batch with one or more actions remaining.",
                   :action_count => batch_actions.size,
                   :payload_size => stream_writer.pos,
                   :content_length => body_stream.size,
                   :batch_offset => (index + 1 - batch_actions.size))
      bulk_responses << bulk_send(body_stream, batch_actions)
      # truncate returns 0 (truthy in Ruby), so seek always runs too
      body_stream.truncate(0) && body_stream.seek(0)
      stream_writer = gzip_writer(body_stream) if http_compression
      batch_actions.clear
    end
    stream_writer.write(serialized)
    batch_actions << action
  end

  stream_writer.close if http_compression
  logger.debug("Sending final bulk request for batch.",
               :action_count => batch_actions.size,
               :payload_size => stream_writer.pos,
               :content_length => body_stream.size,
               :batch_offset => (actions.size - batch_actions.size))
  bulk_responses << bulk_send(body_stream, batch_actions) if body_stream.size > 0
  body_stream.close unless http_compression

  join_bulk_responses(bulk_responses)
end
159
+
160
# Wraps a fresh, empty IO in a GzipWriter for compressed bulk bodies.
# @param io [IO] must be empty and positioned at 0
# @raise [ArgumentError] when `io` already holds data
def gzip_writer(io)
  raise(ArgumentError, "Cannot create gzip writer on IO with unread bytes") unless io.eof?
  raise(ArgumentError, "Cannot create gzip writer on non-empty IO") unless io.pos == 0

  Zlib::GzipWriter.new(io, Zlib::DEFAULT_COMPRESSION, Zlib::DEFAULT_STRATEGY)
end
166
+
167
# Merges the responses of the partial bulk requests into a single
# bulk-shaped hash: any sub-response with errors flags the whole batch.
def join_bulk_responses(bulk_responses)
  merged_items = bulk_responses.flat_map { |resp| resp.fetch("items", []) }
  any_errors = bulk_responses.any? { |resp| resp["errors"] == true }
  { "errors" => any_errors, "items" => merged_items }
end
173
+
174
# POSTs one bulk payload and interprets the HTTP response.
# 200 -> parsed body; 413 -> synthesized per-action error items;
# anything else raises BadResponseCodeError.
def bulk_send(body_stream, batch_actions)
  params = http_compression ? {:headers => {"Content-Encoding" => "gzip"}} : {}
  response = @pool.post(@bulk_path, params, body_stream.string)

  @bulk_response_metrics.increment(response.code.to_s)

  code = response.code
  if code == 200 # OK
    LogStash::Json.load(response.body)
  elsif code == 413 # Payload Too Large
    logger.warn("Bulk request rejected: `413 Payload Too Large`", :action_count => batch_actions.size, :content_length => body_stream.size)
    emulate_batch_error_response(batch_actions, response.code, 'payload_too_large')
  else
    url = ::LogStash::Util::SafeURI.new(response.final_url)
    raise ::LogStash::Outputs::OpenSearch::HttpClient::Pool::BadResponseCodeError.new(
      response.code, url, body_stream.to_s, response.body
    )
  end
end
193
+
194
# Fabricates a bulk-response-shaped hash marking every action in the batch
# as failed with the given HTTP code and error type (used when the whole
# request was rejected, e.g. 413).
def emulate_batch_error_response(actions, http_code, reason)
  items = actions.map do |action|
    header = action.is_a?(Array) ? action.first : action
    op_name, _params = header.first
    { op_name => { "status" => http_code, "error" => { "type" => reason } } }
  end
  { "errors" => true, "items" => items }
end
206
+
207
# GET `path` and parse the JSON response body.
def get(path)
  LogStash::Json.load(@pool.get(path, nil).body)
end

# POST `body_string` to `path` and parse the JSON response body.
def post(path, params = {}, body_string)
  LogStash::Json.load(@pool.post(path, params, body_string).body)
end

# Shuts down the underlying connection pool.
def close
  @pool.close
end
220
+
221
# Resolves one property (:user, :port, :path, ...) across all configured
# URIs plus an explicitly configured default.
# @raise [LogStash::ConfigurationError] when URIs disagree while sniffing,
#   or when an explicit setting conflicts with a URL-embedded one
def calculate_property(uris, property, default, sniff_check)
  values = uris.map(&property).uniq

  if sniff_check && values.size > 1
    raise LogStash::ConfigurationError, "Cannot have multiple values for #{property} in hosts when sniffing is enabled!"
  end

  uri_value = values.first

  # Blanks are as good as nil
  default = nil if default.is_a?(String) && default.empty?
  uri_value = nil if uri_value.is_a?(String) && uri_value.empty?

  if default && uri_value && (default != uri_value)
    raise LogStash::ConfigurationError, "Explicit value for '#{property}' was declared, but it is different in one of the URLs given! Please make sure your URLs are inline with explicit values. The URLs have the property set to '#{uri_value}', but it was also set to '#{default}' explicitly"
  end

  # URL-embedded value wins over the explicit default.
  uri_value || default
end
239
+
240
# Whether sniffing (node discovery) is enabled.
def sniffing
  @options[:sniffing]
end

# Effective username: URL-embedded value wins over the :user option.
def user
  calculate_property(uris, :user, @options[:user], sniffing)
end

# Effective password: URL-embedded value wins over the :password option.
def password
  calculate_property(uris, :password, @options[:password], sniffing)
end

# Effective path prefix, normalized to start with "/".
def path
  calculated = calculate_property(uris, :path, client_settings[:path], sniffing)
  calculated && !calculated.start_with?("/") ? "/#{calculated}" : calculated
end
257
+
258
# Effective URL scheme ('http'/'https'), reconciled between the explicit
# SSL option and any scheme embedded in the configured URLs.
# May return nil when neither source declares one.
# @raise [LogStash::ConfigurationError] on a bad or conflicting scheme
def scheme
  explicit_scheme =
    if ssl_options && ssl_options.has_key?(:enabled)
      ssl_options[:enabled] ? 'https' : 'http'
    end

  calculated_scheme = calculate_property(uris, :scheme, explicit_scheme, sniffing)

  if calculated_scheme && calculated_scheme !~ /https?/
    raise LogStash::ConfigurationError, "Bad scheme '#{calculated_scheme}' found should be one of http/https"
  end

  if calculated_scheme && explicit_scheme && calculated_scheme != explicit_scheme
    raise LogStash::ConfigurationError, "SSL option was explicitly set to #{ssl_options[:enabled]} but a URL was also declared with a scheme of '#{explicit_scheme}'. Please reconcile this"
  end

  calculated_scheme # May be nil if explicit_scheme is nil!
end
277
+
278
# Effective port; defaults to 9200 when neither URLs nor options set one.
def port
  # We don't set the 'default' here because the default is what the user
  # indicated, so we use an || outside of calculate_property. This lets people
  # enter things like foo:123, bar and wind up with foo:123, bar:9200
  calculate_property(uris, :port, nil, sniffing) || 9200
end

# Raw configured hosts list.
def uris
  @options[:hosts]
end

# The :client_settings option hash (never nil).
def client_settings
  @options[:client_settings] || {}
end

# SSL sub-settings hash (never nil).
def ssl_options
  client_settings.fetch(:ssl, {})
end

# Whether request bodies should be gzip-compressed.
def http_compression
  client_settings.fetch(:http_compression, false)
end
300
+
301
# Builds the Manticore HTTP adapter from timeout/proxy/pool/SSL settings.
# @param options [Hash] the client options (only :timeout is read here)
# @return [ManticoreAdapter]
def build_adapter(options)
  timeout = options[:timeout] || 0

  adapter_options = {
    :socket_timeout => timeout,
    :request_timeout => timeout,
  }

  adapter_options[:proxy] = client_settings[:proxy] if client_settings[:proxy]

  adapter_options[:check_connection_timeout] = client_settings[:check_connection_timeout] if client_settings[:check_connection_timeout]

  # Having these explicitly set to nil is an error; copy only when present.
  [:pool_max, :pool_max_per_route].each do |key|
    adapter_options[key] = client_settings[key] if client_settings[key]
  end

  adapter_options[:ssl] = ssl_options if self.scheme == 'https'

  adapter_options[:headers] = client_settings[:headers] if client_settings[:headers]

  ::LogStash::Outputs::OpenSearch::HttpClient::ManticoreAdapter.new(@logger, adapter_options)
end
330
+
331
# Builds, starts and returns the connection pool over all configured hosts.
# @param options [Hash] client options (sniffing, health-check paths, etc.)
# @return [Pool] a started pool
def build_pool(options)
  adapter = build_adapter(options)

  pool_options = {
    :distribution_checker => options[:distribution_checker],
    :sniffing => sniffing,
    :sniffer_delay => options[:sniffer_delay],
    :sniffing_path => options[:sniffing_path],
    :healthcheck_path => options[:healthcheck_path],
    :resurrect_delay => options[:resurrect_delay],
    :url_normalizer => self.method(:host_to_url),
    :metric => options[:metric]
  }
  pool_options[:scheme] = self.scheme if self.scheme

  full_urls = @options[:hosts].map { |h| host_to_url(h) }
  pool = ::LogStash::Outputs::OpenSearch::HttpClient::Pool.new(@logger, adapter, full_urls, pool_options)
  pool.start
  pool
end
352
+
353
# Normalizes one configured host entry into a full SafeURI, filling any
# missing pieces (scheme, credentials, port, path) from @url_template and
# appending configured query parameters.
# @param h a URI-like object responding to user/password/host/port/path/query
# @return [LogStash::Util::SafeURI]
def host_to_url(h)
  # Never override the calculated scheme
  raw_scheme = @url_template[:scheme] || 'http'

  raw_user = h.user || @url_template[:user]
  raw_password = h.password || @url_template[:password]
  postfixed_userinfo = raw_user && raw_password ? "#{raw_user}:#{raw_password}@" : nil

  raw_host = h.host # Always replace this!
  raw_port = h.port || @url_template[:port]

  raw_path = !h.path.nil? && !h.path.empty? && h.path != "/" ? h.path : @url_template[:path]
  prefixed_raw_path = raw_path && !raw_path.empty? ? raw_path : "/"

  parameters = client_settings[:parameters]
  raw_query =
    if parameters && !parameters.empty?
      # Merge configured parameters over any query already on the host URL.
      combined = h.query ? Hash[URI::decode_www_form(h.query)].merge(parameters) : parameters
      combined.flat_map { |k, v| Array(v).map { |av| "#{k}=#{av}" } }.join("&")
    else
      h.query
    end
  prefixed_raw_query = raw_query && !raw_query.empty? ? "?#{raw_query}" : nil

  ::LogStash::Util::SafeURI.new(
    "#{raw_scheme}://#{postfixed_userinfo}#{raw_host}:#{raw_port}#{prefixed_raw_path}#{prefixed_raw_query}"
  )
end
386
+
387
# True when `path` answers with a 2xx status (HEAD by default, GET when
# the endpoint does not support HEAD).
def exists?(path, use_get=false)
  response = use_get ? @pool.get(path) : @pool.head(path)
  (200..299).cover?(response.code)
end
391
+
392
# True when a template named `name` is already installed.
def template_exists?(name)
  exists?("/#{template_endpoint}/#{name}")
end

# Uploads `template` under `name` via PUT.
def template_put(name, template)
  path = "#{template_endpoint}/#{name}"
  logger.info("Installing OpenSearch template", name: name)
  @pool.put(path, nil, LogStash::Json.dump(template))
end

# API endpoint used for template management.
def template_endpoint
  # TODO: Check Version < 7.8 and use index template for >= 7.8 & OpenSearch
  # https://docs-beta.opensearch.org/opensearch/index-templates/
  '_template'
end

# check whether rollover alias already exists
def rollover_alias_exists?(name)
  exists?(name)
end
412
+
413
+ # Create a new rollover alias
414
# Creates a new rollover alias, tolerating the case where it already
# exists (OpenSearch answers 400 for a duplicate — treat that as success).
def rollover_alias_put(alias_name, alias_definition)
  @pool.put(CGI::escape(alias_name), nil, LogStash::Json.dump(alias_definition))
  logger.info("Created rollover alias", name: alias_name)
rescue ::LogStash::Outputs::OpenSearch::HttpClient::Pool::BadResponseCodeError => e
  # If the rollover alias already exists, ignore the error that comes back from OpenSearch
  if e.response_code == 400
    logger.info("Rollover alias already exists, skipping", name: alias_name)
    return
  end
  raise e
end
427
+
428
+ # Build a bulk item for an opensearch update action
429
# Build a bulk item for an opensearch update action: returns the cleaned
# [args, source] pair, handling scripted updates, doc_as_upsert and
# scripted_upsert options.
def update_action_builder(args, source)
  args = args.clone
  if args[:_script]
    # Use the event as a hash from your script with variable name defined
    # by script_var_name (default: "event"), e.g. event["@timestamp"]
    original_source = source
    source = { 'script' => { 'params' => { @options[:script_var_name] => original_source } } }
    if @options[:scripted_upsert]
      source['scripted_upsert'] = true
      source['upsert'] = {}
    elsif @options[:doc_as_upsert]
      source['upsert'] = original_source
    else
      source['upsert'] = args.delete(:_upsert) if args[:_upsert]
    end
    # Map the configured script_type to the field the bulk API expects.
    script_field = { 'indexed' => 'id', 'file' => 'file', 'inline' => 'inline' }[@options[:script_type]]
    source['script'][script_field] = args.delete(:_script) if script_field
    source['script']['lang'] = @options[:script_lang] if @options[:script_lang] != ''
  else
    source = { 'doc' => source }
    if @options[:doc_as_upsert]
      source['doc_as_upsert'] = true
    else
      source['upsert'] = args.delete(:_upsert) if args[:_upsert]
    end
  end
  [args, source]
end
464
+ end
465
+ end end end