logstash-input-file 4.0.5 → 4.1.0

Files changed (54)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +25 -3
  3. data/JAR_VERSION +1 -0
  4. data/docs/index.asciidoc +195 -37
  5. data/lib/filewatch/bootstrap.rb +74 -0
  6. data/lib/filewatch/discoverer.rb +94 -0
  7. data/lib/filewatch/helper.rb +65 -0
  8. data/lib/filewatch/observing_base.rb +97 -0
  9. data/lib/filewatch/observing_read.rb +23 -0
  10. data/lib/filewatch/observing_tail.rb +22 -0
  11. data/lib/filewatch/read_mode/handlers/base.rb +81 -0
  12. data/lib/filewatch/read_mode/handlers/read_file.rb +47 -0
  13. data/lib/filewatch/read_mode/handlers/read_zip_file.rb +57 -0
  14. data/lib/filewatch/read_mode/processor.rb +117 -0
  15. data/lib/filewatch/settings.rb +67 -0
  16. data/lib/filewatch/sincedb_collection.rb +215 -0
  17. data/lib/filewatch/sincedb_record_serializer.rb +70 -0
  18. data/lib/filewatch/sincedb_value.rb +87 -0
  19. data/lib/filewatch/tail_mode/handlers/base.rb +124 -0
  20. data/lib/filewatch/tail_mode/handlers/create.rb +17 -0
  21. data/lib/filewatch/tail_mode/handlers/create_initial.rb +21 -0
  22. data/lib/filewatch/tail_mode/handlers/delete.rb +11 -0
  23. data/lib/filewatch/tail_mode/handlers/grow.rb +11 -0
  24. data/lib/filewatch/tail_mode/handlers/shrink.rb +20 -0
  25. data/lib/filewatch/tail_mode/handlers/timeout.rb +10 -0
  26. data/lib/filewatch/tail_mode/handlers/unignore.rb +37 -0
  27. data/lib/filewatch/tail_mode/processor.rb +209 -0
  28. data/lib/filewatch/watch.rb +107 -0
  29. data/lib/filewatch/watched_file.rb +226 -0
  30. data/lib/filewatch/watched_files_collection.rb +84 -0
  31. data/lib/filewatch/winhelper.rb +65 -0
  32. data/lib/jars/filewatch-1.0.0.jar +0 -0
  33. data/lib/logstash/inputs/delete_completed_file_handler.rb +9 -0
  34. data/lib/logstash/inputs/file.rb +162 -107
  35. data/lib/logstash/inputs/file_listener.rb +61 -0
  36. data/lib/logstash/inputs/log_completed_file_handler.rb +13 -0
  37. data/logstash-input-file.gemspec +5 -4
  38. data/spec/filewatch/buftok_spec.rb +24 -0
  39. data/spec/filewatch/reading_spec.rb +128 -0
  40. data/spec/filewatch/sincedb_record_serializer_spec.rb +71 -0
  41. data/spec/filewatch/spec_helper.rb +120 -0
  42. data/spec/filewatch/tailing_spec.rb +440 -0
  43. data/spec/filewatch/watched_file_spec.rb +38 -0
  44. data/spec/filewatch/watched_files_collection_spec.rb +73 -0
  45. data/spec/filewatch/winhelper_spec.rb +22 -0
  46. data/spec/fixtures/compressed.log.gz +0 -0
  47. data/spec/fixtures/compressed.log.gzip +0 -0
  48. data/spec/fixtures/invalid_utf8.gbk.log +2 -0
  49. data/spec/fixtures/no-final-newline.log +2 -0
  50. data/spec/fixtures/uncompressed.log +2 -0
  51. data/spec/{spec_helper.rb → helpers/spec_helper.rb} +14 -41
  52. data/spec/inputs/file_read_spec.rb +155 -0
  53. data/spec/inputs/{file_spec.rb → file_tail_spec.rb} +55 -52
  54. metadata +96 -28
data/lib/filewatch/sincedb_value.rb
@@ -0,0 +1,87 @@
+ # encoding: utf-8
+
+ module FileWatch
+   # Tracks the position and expiry of the offset of a file-of-interest
+   class SincedbValue
+     attr_reader :last_changed_at, :watched_file, :path_in_sincedb
+
+     def initialize(position, last_changed_at = nil, watched_file = nil)
+       @position = position # this is the value read from disk
+       @last_changed_at = last_changed_at
+       @watched_file = watched_file
+       touch if @last_changed_at.nil? || @last_changed_at.zero?
+     end
+
+     def add_path_in_sincedb(path)
+       @path_in_sincedb = path # can be nil
+       self
+     end
+
+     def last_changed_at_expires(duration)
+       @last_changed_at + duration
+     end
+
+     def position
+       # either the value from disk or the current watched_file position
+       @watched_file.nil? ? @position : @watched_file.bytes_read
+     end
+
+     def update_position(pos)
+       touch
+       if @watched_file.nil?
+         @position = pos
+       else
+         @watched_file.update_bytes_read(pos)
+       end
+     end
+
+     def increment_position(pos)
+       touch
+       if watched_file.nil?
+         @position += pos
+       else
+         @watched_file.increment_bytes_read(pos)
+       end
+     end
+
+     def set_watched_file(watched_file)
+       touch
+       @watched_file = watched_file
+     end
+
+     def touch
+       @last_changed_at = Time.now.to_f
+     end
+
+     def to_s
+       # consider serializing the watched_file state as well
+       "#{position} #{last_changed_at}".tap do |s|
+         if @watched_file.nil?
+           s.concat(" ").concat(@path_in_sincedb) unless @path_in_sincedb.nil?
+         else
+           s.concat(" ").concat(@watched_file.path)
+         end
+       end
+     end
+
+     def clear_watched_file
+       @watched_file = nil
+     end
+
+     def unset_watched_file
+       # cache the position
+       # we don't cache the path here because we know we are done with this file,
+       # either via the `delete` handling
+       # or when read mode is done with a file.
+       # in the case of `delete`, if the file was renamed then @watched_file is the
+       # watched_file of the previous path and the new path will be discovered and
+       # it should have the same inode as before.
+       # The key from the new watched_file should then locate this entry and we
+       # can resume from the cached position
+       return if @watched_file.nil?
+       wf = @watched_file
+       @watched_file = nil
+       @position = wf.bytes_read
+     end
+   end
+ end
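
The sincedb value delegates position tracking to its watched file once one is attached, and caches the byte count again when that association is dropped. A minimal usage sketch of the class above, assuming only that the attached object responds to the methods SincedbValue actually calls; the stand-in struct and the example path are hypothetical, not part of the gem:

    # Stand-in for FileWatch::WatchedFile; only the methods SincedbValue calls are stubbed here.
    FakeWatchedFile = Struct.new(:path, :bytes_read) do
      def update_bytes_read(pos); self.bytes_read = pos; end
      def increment_bytes_read(amount); self.bytes_read += amount; end
    end

    value = FileWatch::SincedbValue.new(0)          # position 0, as if read from a sincedb record
    value.set_watched_file(FakeWatchedFile.new("/var/log/app.log", 0))
    value.increment_position(128)                   # delegates to the watched file's increment_bytes_read
    value.position                                  # => 128, read back from the watched file
    value.unset_watched_file                        # caches 128 locally and drops the file reference
    value.to_s                                      # => "128 <last_changed_at>" (no path cached here)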
data/lib/filewatch/tail_mode/handlers/base.rb
@@ -0,0 +1,124 @@
+ # encoding: utf-8
+ require "logstash/util/loggable"
+
+ module FileWatch module TailMode module Handlers
+   class Base
+     include LogStash::Util::Loggable
+     attr_reader :sincedb_collection
+
+     def initialize(sincedb_collection, observer, settings)
+       @settings = settings
+       @sincedb_collection = sincedb_collection
+       @observer = observer
+     end
+
+     def handle(watched_file)
+       logger.debug("handling: #{watched_file.path}")
+       unless watched_file.has_listener?
+         watched_file.set_listener(@observer)
+       end
+       handle_specifically(watched_file)
+     end
+
+     def handle_specifically(watched_file)
+       # some handlers don't need to define this method
+     end
+
+     def update_existing_specifically(watched_file, sincedb_value)
+       # when a handler subclass does not implement this then do nothing
+     end
+
+     private
+
+     def read_to_eof(watched_file)
+       changed = false
+       # from a real config (has 102 file inputs)
+       # -- that cfg creates a file input for every log file, to get a dedicated file pointer and read all files simultaneously
+       # -- if we put all log files in one file input glob we will have indexing delay, because Logstash waits until the first file reaches EOF
+       # by allowing the user to specify a combo of `file_chunk_count` X `file_chunk_size`...
+       # we enable the pseudo parallel processing of each file.
+       # the user also has the option to specify a low `stat_interval` and a very high `discover_interval` to respond
+       # more quickly to changing files while not allowing too much content to build up before reading it.
+       @settings.file_chunk_count.times do
+         begin
+           data = watched_file.file_read(@settings.file_chunk_size)
+           lines = watched_file.buffer_extract(data)
+           logger.warn("read_to_eof: no delimiter found in current chunk") if lines.empty?
+           changed = true
+           lines.each do |line|
+             watched_file.listener.accept(line)
+             sincedb_collection.increment(watched_file.sincedb_key, line.bytesize + @settings.delimiter_byte_size)
+           end
+         rescue EOFError
+           # it only makes sense to signal EOF in "read" mode, not "tail"
+           break
+         rescue Errno::EWOULDBLOCK, Errno::EINTR
+           watched_file.listener.error
+           break
+         rescue => e
+           logger.error("read_to_eof: general error reading #{watched_file.path}", "error" => e.inspect, "backtrace" => e.backtrace.take(4))
+           watched_file.listener.error
+           break
+         end
+       end
+       sincedb_collection.request_disk_flush if changed
+     end
+
+     def open_file(watched_file)
+       return true if watched_file.file_open?
+       logger.debug("opening #{watched_file.path}")
+       begin
+         watched_file.open
+       rescue
+         # don't emit this message too often. if a file that we can't
+         # read is changing a lot, we'll try to open it more often, and spam the logs.
+         now = Time.now.to_i
+         logger.warn("open_file OPEN_WARN_INTERVAL is '#{OPEN_WARN_INTERVAL}'")
+         if watched_file.last_open_warning_at.nil? || now - watched_file.last_open_warning_at > OPEN_WARN_INTERVAL
+           logger.warn("failed to open #{watched_file.path}: #{$!.inspect}, #{$!.backtrace.take(3)}")
+           watched_file.last_open_warning_at = now
+         else
+           logger.debug("suppressed warning for `failed to open` #{watched_file.path}: #{$!.inspect}")
+         end
+         watched_file.watch # set it back to watch so we can try it again
+       end
+       if watched_file.file_open?
+         watched_file.listener.opened
+         true
+       else
+         false
+       end
+     end
+
+     def add_or_update_sincedb_collection(watched_file)
+       sincedb_value = @sincedb_collection.find(watched_file)
+       if sincedb_value.nil?
+         add_new_value_sincedb_collection(watched_file)
+       elsif sincedb_value.watched_file == watched_file
+         update_existing_sincedb_collection_value(watched_file, sincedb_value)
+       else
+         logger.warn? && logger.warn("mismatch on sincedb_value.watched_file, this should have been handled by Discoverer")
+       end
+       watched_file.initial_completed
+     end
+
+     def update_existing_sincedb_collection_value(watched_file, sincedb_value)
+       logger.debug("update_existing_sincedb_collection_value: #{watched_file.path}, last value #{sincedb_value.position}, cur size #{watched_file.last_stat_size}")
+       update_existing_specifically(watched_file, sincedb_value)
+     end
+
+     def add_new_value_sincedb_collection(watched_file)
+       sincedb_value = get_new_value_specifically(watched_file)
+       logger.debug("add_new_value_sincedb_collection: #{watched_file.path}", "position" => sincedb_value.position)
+       sincedb_collection.set(watched_file.sincedb_key, sincedb_value)
+     end
+
+     def get_new_value_specifically(watched_file)
+       position = @settings.start_new_files_at == :beginning ? 0 : watched_file.last_stat_size
+       value = SincedbValue.new(position)
+       value.set_watched_file(watched_file)
+       watched_file.update_bytes_read(position)
+       value
+     end
+   end
+ end end end
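
A little arithmetic makes the `read_to_eof` comment concrete: each call reads at most `file_chunk_count` chunks of `file_chunk_size` bytes from one file before handing control back, which is what lets many files make progress in turn. Illustrative numbers only; these are not the plugin's defaults:

    # Hypothetical settings for illustration: cap each file at 4 chunks of 32 KiB per pass.
    file_chunk_size  = 32 * 1024                      # bytes requested per file_read call
    file_chunk_count = 4                              # chunks read before moving to the next file
    file_chunk_size * file_chunk_count                # => 131072 bytes max from one file per pass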
data/lib/filewatch/tail_mode/handlers/create.rb
@@ -0,0 +1,17 @@
+ # encoding: utf-8
+
+ module FileWatch module TailMode module Handlers
+   class Create < Base
+     def handle_specifically(watched_file)
+       if open_file(watched_file)
+         add_or_update_sincedb_collection(watched_file) unless sincedb_collection.member?(watched_file.sincedb_key)
+       end
+     end
+
+     def update_existing_specifically(watched_file, sincedb_value)
+       # sincedb_value is the source of truth
+       position = sincedb_value.position
+       watched_file.update_bytes_read(position)
+     end
+   end
+ end end end
data/lib/filewatch/tail_mode/handlers/create_initial.rb
@@ -0,0 +1,21 @@
+ # encoding: utf-8
+
+ module FileWatch module TailMode module Handlers
+   class CreateInitial < Base
+     def handle_specifically(watched_file)
+       if open_file(watched_file)
+         add_or_update_sincedb_collection(watched_file)
+       end
+     end
+
+     def update_existing_specifically(watched_file, sincedb_value)
+       position = watched_file.last_stat_size
+       if @settings.start_new_files_at == :beginning
+         position = 0
+       end
+       logger.debug("update_existing_specifically - #{watched_file.path}: seeking to #{position}")
+       watched_file.update_bytes_read(position)
+       sincedb_value.update_position(position)
+     end
+   end
+ end end end
data/lib/filewatch/tail_mode/handlers/delete.rb
@@ -0,0 +1,11 @@
+ # encoding: utf-8
+
+ module FileWatch module TailMode module Handlers
+   class Delete < Base
+     def handle_specifically(watched_file)
+       watched_file.listener.deleted
+       sincedb_collection.unset_watched_file(watched_file)
+       watched_file.file_close
+     end
+   end
+ end end end
data/lib/filewatch/tail_mode/handlers/grow.rb
@@ -0,0 +1,11 @@
+ # encoding: utf-8
+
+ module FileWatch module TailMode module Handlers
+   class Grow < Base
+     def handle_specifically(watched_file)
+       watched_file.file_seek(watched_file.bytes_read)
+       logger.debug("reading to eof: #{watched_file.path}")
+       read_to_eof(watched_file)
+     end
+   end
+ end end end
data/lib/filewatch/tail_mode/handlers/shrink.rb
@@ -0,0 +1,20 @@
+ # encoding: utf-8
+
+ module FileWatch module TailMode module Handlers
+   class Shrink < Base
+     def handle_specifically(watched_file)
+       add_or_update_sincedb_collection(watched_file)
+       watched_file.file_seek(watched_file.bytes_read)
+       logger.debug("reading to eof: #{watched_file.path}")
+       read_to_eof(watched_file)
+     end
+
+     def update_existing_specifically(watched_file, sincedb_value)
+       # we have a match but size is smaller
+       # set all to zero
+       logger.debug("update_existing_specifically: #{watched_file.path}: was truncated seeking to beginning")
+       watched_file.update_bytes_read(0) if watched_file.bytes_read != 0
+       sincedb_value.update_position(0)
+     end
+   end
+ end end end
data/lib/filewatch/tail_mode/handlers/timeout.rb
@@ -0,0 +1,10 @@
+ # encoding: utf-8
+
+ module FileWatch module TailMode module Handlers
+   class Timeout < Base
+     def handle_specifically(watched_file)
+       watched_file.listener.timed_out
+       watched_file.file_close
+     end
+   end
+ end end end
data/lib/filewatch/tail_mode/handlers/unignore.rb
@@ -0,0 +1,37 @@
+ # encoding: utf-8
+
+ module FileWatch module TailMode module Handlers
+   class Unignore < Base
+     # a watched file can be put straight into the ignored state
+     # before any other handling has been done
+     # at a minimum we create or associate a sincedb value
+     def handle_specifically(watched_file)
+       add_or_update_sincedb_collection(watched_file) unless sincedb_collection.member?(watched_file.sincedb_key)
+     end
+
+     def get_new_value_specifically(watched_file)
+       # for files that were initially ignored, bytes_read was set to stat.size;
+       # use this value, not `start_new_files_at`, for the position
+       # logger.debug("get_new_value_specifically", "watched_file" => watched_file.inspect)
+       SincedbValue.new(watched_file.bytes_read).tap do |val|
+         val.set_watched_file(watched_file)
+       end
+     end
+
+     def update_existing_specifically(watched_file, sincedb_value)
+       # when this watched_file was ignored it had its bytes_read set to eof
+       # now the file has changed (watched_file.size_changed?)
+       # it has been put into the watched state so when it becomes active
+       # we will handle grow or shrink
+       # for now we seek to where we were before the file got ignored (grow)
+       # or to the start (shrink)
+       position = 0
+       if watched_file.shrunk?
+         watched_file.update_bytes_read(0)
+       else
+         position = watched_file.bytes_read
+       end
+       sincedb_value.update_position(position)
+     end
+   end
+ end end end
data/lib/filewatch/tail_mode/processor.rb
@@ -0,0 +1,209 @@
+ # encoding: utf-8
+ require "logstash/util/loggable"
+ require_relative "handlers/base"
+ require_relative "handlers/create_initial"
+ require_relative "handlers/create"
+ require_relative "handlers/delete"
+ require_relative "handlers/grow"
+ require_relative "handlers/shrink"
+ require_relative "handlers/timeout"
+ require_relative "handlers/unignore"
+
+ module FileWatch module TailMode
+   # Must handle
+   #   :create_initial - file is discovered and we have no record of it in the sincedb
+   #   :create - file is discovered and we have seen it before in the sincedb
+   #   :grow - file has more content
+   #   :shrink - file has less content
+   #   :delete - file can't be read
+   #   :timeout - file is closable
+   #   :unignore - file was ignored, but we have now received new content
+   class Processor
+     include LogStash::Util::Loggable
+
+     attr_reader :watch, :deletable_filepaths
+
+     def initialize(settings)
+       @settings = settings
+       @deletable_filepaths = []
+     end
+
+     def add_watch(watch)
+       @watch = watch
+       self
+     end
+
+     def initialize_handlers(sincedb_collection, observer)
+       @create_initial = Handlers::CreateInitial.new(sincedb_collection, observer, @settings)
+       @create = Handlers::Create.new(sincedb_collection, observer, @settings)
+       @grow = Handlers::Grow.new(sincedb_collection, observer, @settings)
+       @shrink = Handlers::Shrink.new(sincedb_collection, observer, @settings)
+       @delete = Handlers::Delete.new(sincedb_collection, observer, @settings)
+       @timeout = Handlers::Timeout.new(sincedb_collection, observer, @settings)
+       @unignore = Handlers::Unignore.new(sincedb_collection, observer, @settings)
+     end
+
+     def create(watched_file)
+       @create.handle(watched_file)
+     end
+
+     def create_initial(watched_file)
+       @create_initial.handle(watched_file)
+     end
+
+     def grow(watched_file)
+       @grow.handle(watched_file)
+     end
+
+     def shrink(watched_file)
+       @shrink.handle(watched_file)
+     end
+
+     def delete(watched_file)
+       @delete.handle(watched_file)
+     end
+
+     def timeout(watched_file)
+       @timeout.handle(watched_file)
+     end
+
+     def unignore(watched_file)
+       @unignore.handle(watched_file)
+     end
+
+     def process_closed(watched_files)
+       logger.debug("Closed processing")
+       # Handles watched_files in the closed state.
+       # if a file's size changed it is put into the watched state
+       watched_files.select {|wf| wf.closed? }.each do |watched_file|
+         path = watched_file.path
+         begin
+           watched_file.restat
+           if watched_file.size_changed?
+             # if the closed file changed, move it to the watched state
+             # not to active state because we want to respect the active files window.
+             watched_file.watch
+           end
+         rescue Errno::ENOENT
+           # file has gone away or we can't read it anymore.
+           common_deleted_reaction(watched_file, "Closed")
+         rescue => e
+           common_error_reaction(path, e, "Closed")
+         end
+         break if watch.quit?
+       end
+     end
+
+     def process_ignored(watched_files)
+       logger.debug("Ignored processing")
+       # Handles watched_files in the ignored state.
+       # if a file's size changed:
+       #   put it in the watched state
+       #   invoke unignore
+       watched_files.select {|wf| wf.ignored? }.each do |watched_file|
+         path = watched_file.path
+         begin
+           watched_file.restat
+           if watched_file.size_changed?
+             watched_file.watch
+             unignore(watched_file)
+           end
+         rescue Errno::ENOENT
+           # file has gone away or we can't read it anymore.
+           common_deleted_reaction(watched_file, "Ignored")
+         rescue => e
+           common_error_reaction(path, e, "Ignored")
+         end
+         break if watch.quit?
+       end
+     end
+
+     def process_watched(watched_files)
+       logger.debug("Watched processing")
+       # Handles watched_files in the watched state.
+       # for a slice of them:
+       #   move to the active state
+       #   and we allow the block to open the file and create a sincedb collection record if needed
+       # some have never been active and some have
+       # those that were active before but are watched now were closed under constraint
+
+       # how much of the max active window is available
+       to_take = @settings.max_active - watched_files.count{|wf| wf.active?}
+       if to_take > 0
+         watched_files.select {|wf| wf.watched?}.take(to_take).each do |watched_file|
+           path = watched_file.path
+           begin
+             watched_file.restat
+             watched_file.activate
+             if watched_file.initial?
+               create_initial(watched_file)
+             else
+               create(watched_file)
+             end
+           rescue Errno::ENOENT
+             # file has gone away or we can't read it anymore.
+             common_deleted_reaction(watched_file, "Watched")
+           rescue => e
+             common_error_reaction(path, e, "Watched")
+           end
+           break if watch.quit?
+         end
+       else
+         now = Time.now.to_i
+         if (now - watch.lastwarn_max_files) > MAX_FILES_WARN_INTERVAL
+           waiting = watched_files.size - @settings.max_active
+           logger.warn(@settings.max_warn_msg + ", files yet to open: #{waiting}")
+           watch.lastwarn_max_files = now
+         end
+       end
+     end
+
+     def process_active(watched_files)
+       logger.debug("Active processing")
+       # Handles watched_files in the active state.
+       # each has been read once - unless it was empty at the time
+       watched_files.select {|wf| wf.active? }.each do |watched_file|
+         path = watched_file.path
+         begin
+           watched_file.restat
+         rescue Errno::ENOENT
+           # file has gone away or we can't read it anymore.
+           common_deleted_reaction(watched_file, "Active")
+           next
+         rescue => e
+           common_error_reaction(path, e, "Active")
+           next
+         end
+         break if watch.quit?
+         if watched_file.grown?
+           logger.debug("Active - file grew: #{path}: new size is #{watched_file.last_stat_size}, old size #{watched_file.bytes_read}")
+           grow(watched_file)
+         elsif watched_file.shrunk?
+           # we don't update the size here, it's updated when we actually read
+           logger.debug("Active - file shrunk #{path}: new size is #{watched_file.last_stat_size}, old size #{watched_file.bytes_read}")
+           shrink(watched_file)
+         else
+           # same size, do nothing
+         end
+         # can any active files be closed to make way for waiting files?
+         if watched_file.file_closable?
+           logger.debug("Watch each: active: file expired: #{path}")
+           timeout(watched_file)
+           watched_file.close
+         end
+       end
+     end
+
+     def common_deleted_reaction(watched_file, action)
+       # file has gone away or we can't read it anymore.
+       watched_file.unwatch
+       delete(watched_file)
+       deletable_filepaths << watched_file.path
+       logger.debug("#{action} - stat failed: #{watched_file.path}, removing from collection")
+     end
+
+     def common_error_reaction(path, error, action)
+       logger.error("#{action} - other error #{path}: (#{error.message}, #{error.backtrace.take(8).inspect})")
+     end
+   end
+ end end
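
A hypothetical wiring sketch of how the per-state entry points above are meant to be driven once per scan cycle; the `settings`, `watch`, `sincedb_collection`, `observer`, and `watched_files` objects are assumed here, and the actual driving loop lives elsewhere in filewatch:

    processor = FileWatch::TailMode::Processor.new(settings).add_watch(watch)
    processor.initialize_handlers(sincedb_collection, observer)

    # one pass of the tail-mode state machine over the shared collection of watched files
    processor.process_closed(watched_files)    # closed files that changed size go back to watched
    processor.process_ignored(watched_files)   # ignored files that changed size are unignored
    processor.process_watched(watched_files)   # fill the active window, open files, seed the sincedb
    processor.process_active(watched_files)    # grow / shrink / timeout the currently active files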