bleak_house 3.3 → 3.4

This diff shows the changes between publicly released versions of this package as they appear in their respective public registries; it is provided for informational purposes only.
data.tar.gz.sig CHANGED
Binary file
data/CHANGELOG CHANGED
@@ -1,4 +1,6 @@
 
+v3.4. Clearer output descriptions; work around a Marshal bug on x64; fix for missing immortal leaks.
+
 v3.3. Build Ruby in gem install step; bundle Ruby 1.8.6 source; fixes for truncated final frames.
 
 v3.2. Use Ccsv for faster parsing.
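
The v3.4 entry above mentions working around a Marshal bug on x64. In the analyzer changes below, the workaround is to dump hash-shaped frame data as arrays of pairs and rebuild the hashes after loading. A minimal sketch of that round-trip, with a hypothetical cache path and made-up sample data:

    require 'tmpdir'

    # Stand-in for a frame's birth records; not real logger output.
    births = { 20771 => 'String', 20772 => 'Array' }

    cachefile = File.join(Dir.tmpdir, 'bleak_house_demo.cache')

    # Dump an array of pairs instead of the hash itself (the workaround)...
    File.open(cachefile, 'w') { |f| f.write Marshal.dump(births.to_a) }

    # ...then rebuild the hash after loading, as the analyzer does with Hash[*flatten].
    pairs   = Marshal.load(File.open(cachefile).read)
    rebuilt = Hash[*pairs.flatten]
    puts rebuilt.inspect   # => {20771=>"String", 20772=>"Array"} (key order may vary on 1.8)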
data/Manifest CHANGED
@@ -18,7 +18,6 @@ lib/bleak_house/rails.rb
 lib/bleak_house/support/core_extensions.rb
 lib/bleak_house/support/rake.rb
 lib/bleak_house.rb
-lib/vendor/lightcsv.rb
 LICENSE
 LICENSE_BSD
 LICENSE_RUBY
@@ -17,14 +17,14 @@ module BleakHouse
       -6 => 'heap/free'
     }
 
-    INITIAL_SKIP = 10
+    INITIAL_SKIP = 15 # XXX Might be better as a per-tag skip but that gets kinda complicated
 
-    CLASS_KEYS = eval('[nil, ' + # skip 0
+    CLASS_KEYS = eval('[nil, ' + # Skip 0 so that the output of String#to_s is useful
       open(
         File.dirname(__FILE__) + '/../../../ext/bleak_house/logger/snapshot.h'
       ).read[/\{(.*?)\}/m, 1] + ']')
 
-    def self.calculate!(frame, index, total)
+    def self.calculate!(frame, index, total, obj_count = nil)
       bsize = frame['births'].size
       dsize = frame['deaths'].size
 
@@ -38,7 +38,7 @@ module BleakHouse
         0
       end
 
-      puts " #{index * 100 / total}%: #{frame['meta']['tag']} (#{bsize} births, #{dsize} deaths, ratio #{format('%.2f', frame['meta']['ratio'])}, impact #{format('%.2f', frame['meta']['impact'])})"
+      puts " F#{index}:#{total} (#{index * 100 / total}%): #{frame['meta']['tag']} (#{obj_count.to_s + ' population, ' if obj_count}#{bsize} births, #{dsize} deaths, ratio #{format('%.2f', frame['meta']['ratio'])}, impact #{format('%.2f', frame['meta']['impact'])})"
     end
 
     # Parses and correlates a BleakHouse::Logger output file.
@@ -62,16 +62,14 @@ module BleakHouse
         # Cache is fresh
         puts "Using cache"
         frames = Marshal.load(File.open(cachefile).read)
-
-        puts "#{frames.size} frames"
-
-        frames[0..-3].each_with_index do |frame, index|
+        puts "#{frames.size - 1} frames"
+        frames[0..-2].each_with_index do |frame, index|
           calculate!(frame, index + 1, frames.size - 1)
         end
 
       else
         # Rebuild frames
-        total_frames = `grep '^-1' #{logfile} | wc`.to_i
+        total_frames = `grep '^-1' #{logfile} | wc`.to_i - 2
 
         puts "#{total_frames} frames"
 
@@ -95,13 +93,14 @@ module BleakHouse
           last_population = population
 
           # assign births
-          frame['births'] = frame['objects'].slice(births)
+          frame['births'] = frame['objects'].slice(births).to_a # Work around a Marshal bug
 
           # assign deaths to previous frame
           if final = frames[-2]
-            final['deaths'] = final['objects'].slice(deaths)
+            final['deaths'] = final['objects'].slice(deaths).to_a # Work around a Marshal bug
+            obj_count = final['objects'].size
             final.delete 'objects'
-            calculate!(final, frames.size - 1, total_frames)
+            calculate!(final, frames.size - 1, total_frames, obj_count)
           end
         end
 
@@ -121,20 +120,30 @@ module BleakHouse
           end
         end
 
+        frames = frames[0..-2]
+        frames.last['objects'] = frames.last['objects'].to_a # Work around a Marshal bug
+
         # Cache the result
         File.open(cachefile, 'w') do |f|
           f.write Marshal.dump(frames)
         end
 
       end
-
+
+      # Convert births back to hashes, necessary due to the Marshal workaround
+      frames.each do |frame|
+        frame['births'] = Hash[*frame['births'].flatten]
+      end
+
       # See what objects are still laying around
-      population = frames[-2]['objects'].reject do |key, value|
+      population = frames.last['objects'].reject do |key, value|
        frames.first['births'][key] == value
      end
 
-      # Remove bogus frames
-      frames = frames[INITIAL_SKIP..-3]
+      puts "\n#{frames.size - 1} full frames. Removing #{INITIAL_SKIP} frames from each end of the run to account for\nstartup overhead and GC lag."
+
+      # Remove border frames
+      frames = frames[INITIAL_SKIP..-INITIAL_SKIP]
 
      total_births = frames.inject(0) do |births, frame|
        births + frame['births'].size
@@ -143,10 +152,10 @@ module BleakHouse
        deaths + frame['deaths'].size
      end
 
-      puts "\n#{total_births} births, #{total_deaths} deaths."
+      puts "\n#{total_births} total births, #{total_deaths} total deaths, #{population.size} uncollected objects."
 
      leakers = {}
-
+
      # Find the sources of the leftover objects in the final population
      population.each do |id, klass|
        leaker = frames.detect do |frame|
@@ -169,23 +178,19 @@ module BleakHouse
        Hash[*value.flatten].values.inject(0) {|i, v| i - v}
      end
 
-      puts "\nTags sorted by immortal leaks:"
-      leakers.each do |tag, value|
-        requests = frames.select do |frame|
-          frame['meta']['tag'] == tag
-        end.size
-        values = value.map do |klass, count|
-          count = count/requests
-          [klass, count]
-        end.select do |klass, count|
-          count > 0
-        end
-        if values.any?
-          puts " #{tag} leaks, averaged over #{requests} requests:"
-          values.each do |klass, count|
+      if leakers.any?
+        puts "\nTags sorted by persistent uncollected objects. These objects did not exist at\nstartup, were instantiated by the associated tags, and were never garbage\ncollected:"
+        leakers.each do |tag, value|
+          requests = frames.select do |frame|
+            frame['meta']['tag'] == tag
+          end.size
+          puts " #{tag} leaked (over #{requests} requests):"
+          value.each do |klass, count|
            puts " #{count} #{klass}"
          end
        end
+      else
+        puts "\nNo persistent uncollected objects found for any tags."
      end
 
      impacts = {}
@@ -200,15 +205,15 @@ module BleakHouse
        impact.nan? ? 0 : -impact
      end
 
-      puts "\nTags sorted by impact * ratio:"
+      puts "\nTags sorted by average impact * ratio. Impact is the log10 of the size of the"
+      puts "change in object count for a frame:"
 
      impacts.each do |tag, total|
        puts " #{format('%.4f', total).rjust(7)}: #{tag}"
      end
-
-      puts "\nBye"
-
    end
 
+    puts "\nDone"
+
  end
 end
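
INITIAL_SKIP rises from 10 to 15 above, and the trimming slice changes from frames[INITIAL_SKIP..-3] to frames[INITIAL_SKIP..-INITIAL_SKIP], so border frames are now dropped from both ends of the run. A small sketch of what the new slice keeps, using a stand-in array rather than real frames:

    INITIAL_SKIP = 15
    frames  = (1..100).to_a                        # pretend these are 100 parsed frames
    trimmed = frames[INITIAL_SKIP..-INITIAL_SKIP]

    puts trimmed.first   # => 16 (the first 15 frames are dropped)
    puts trimmed.last    # => 86 (index -15 is kept, so 14 frames come off the tail)
    puts trimmed.size    # => 71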
metadata CHANGED
@@ -1,10 +1,10 @@
 --- !ruby/object:Gem::Specification
-rubygems_version: 0.9.4
-specification_version: 1
+rubygems_version: 0.9.4.6
+specification_version: 2
 name: bleak_house
 version: !ruby/object:Gem::Version
-  version: "3.3"
-date: 2007-10-11 00:00:00 -04:00
+  version: "3.4"
+date: 2007-10-31 00:00:00 -04:00
 summary: A library for finding memory leaks.
 require_paths:
 - lib
@@ -17,11 +17,17 @@ autorequire:
 default_executable:
 bindir: bin
 has_rdoc: true
-required_ruby_version: !ruby/object:Gem::Version::Requirement
+required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">"
+  - - ">="
     - !ruby/object:Gem::Version
-      version: 0.0.0
+      version: "0"
+  version:
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: "0"
   version:
 platform: ruby
 signing_key:
@@ -72,7 +78,6 @@ files:
 - lib/bleak_house/support/core_extensions.rb
 - lib/bleak_house/support/rake.rb
 - lib/bleak_house.rb
-- lib/vendor/lightcsv.rb
 - LICENSE
 - LICENSE_BSD
 - LICENSE_RUBY
@@ -101,9 +106,9 @@ dependencies:
 - !ruby/object:Gem::Dependency
   name: ccsv
   version_requirement:
-  version_requirements: !ruby/object:Gem::Version::Requirement
+  version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ">"
+    - - ">="
       - !ruby/object:Gem::Version
-        version: 0.0.0
+        version: "0"
     version:
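
The metadata above moves from the old Gem::Version::Requirement syntax with "> 0.0.0" to the Gem::Requirement form with ">= 0", which is the default requirement RubyGems records for an unconstrained dependency. A hypothetical gemspec fragment (not the gem's actual build script) that produces that kind of entry:

    require 'rubygems'

    spec = Gem::Specification.new do |s|
      s.name    = 'bleak_house'
      s.version = '3.4'
      s.summary = 'A library for finding memory leaks.'
      s.add_dependency 'ccsv'   # no constraint given, so the requirement defaults to ">= 0"
    end

    puts spec.dependencies.first.requirement.to_s   # => ">= 0"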
metadata.gz.sig CHANGED
Binary file
data/lib/vendor/lightcsv.rb DELETED
@@ -1,168 +0,0 @@
-# = LightCsv
-# CSV parser
-#
-# $Id: lightcsv.rb 76 2007-04-15 14:34:23Z tommy $
-# Copyright:: 2007 (C) TOMITA Masahiro <tommy@tmtm.org>
-# License:: Ruby's
-# Homepage:: http://tmtm.org/ja/ruby/lightcsv
-
-require "strscan"
-
-# == CSV parsing
-# Each record is an array whose elements are the columns.
-# Records are separated by LF, CR, or CRLF.
-#
-# Differences from csv.rb:
-# * An empty line becomes [] rather than [nil].
-# * An empty column not enclosed in '"' becomes "" rather than nil.
-#
-# == Examples
-# * Iterate a block over each record of a CSV file.
-#     LightCsv.foreach(filename){|row| ...}
-#   Same as:
-#     LightCsv.open(filename){|csv| csv.each{|row| ...}}
-#
-# * Return all records of a CSV file.
-#     LightCsv.readlines(filename)  # => [[col1,col2,...],...]
-#   Same as:
-#     LightCsv.open(filename){|csv| csv.map}
-#
-# * Iterate a block over each record of a CSV string.
-#     LightCsv.parse("a1,a2,..."){|row| ...}
-#   Same as:
-#     LightCsv.new("a1,a2,...").each{|row| ...}
-#
-# * Return all records of a CSV string.
-#     LightCsv.parse("a1,a2,...")  # => [[a1,a2,...],...]
-#   Same as:
-#     LightCsv.new("a1,a2,...").map
-#
-class LightCsv
-  include Enumerable
-
-  # == Exception raised for input that cannot be parsed
-  # InvalidFormat#message returns the 10 bytes of input starting at the position that could not be processed.
-  class InvalidFormat < RuntimeError; end
-
-  # Iterates the block over each record of the file.
-  # The block argument is an array representing the record.
-  def self.foreach(filename, &block)
-    self.open(filename) do |f|
-      f.each(&block)
-    end
-  end
-
-  # Returns all records of the file as an array of records.
-  def self.readlines(filename)
-    self.open(filename) do |f|
-      return f.map
-    end
-  end
-
-  # Returns all records of the CSV string as an array of records.
-  # If a block is given, iterates it over each record instead.
-  # The block argument is an array representing the record.
-  def self.parse(string, &block)
-    unless block
-      return self.new(string).map
-    end
-    self.new(string).each do |row|
-      block.call row
-    end
-    return nil
-  end
-
-  # Opens the file and returns a LightCsv object.
-  # If a block is given, it is called with the LightCsv object as its argument.
-  def self.open(filename, &block)
-    f = File.open(filename)
-    csv = self.new(f)
-    if block
-      begin
-        return block.call(csv)
-      ensure
-        csv.close
-      end
-    else
-      return csv
-    end
-  end
-
-  # Creates a LightCsv object.
-  # _src_ is a String or an IO.
-  def initialize(src)
-    if src.kind_of? String
-      @file = nil
-      @ss = StringScanner.new(src)
-    else
-      @file = src
-      @ss = StringScanner.new("")
-    end
-    @buf = ""
-    @bufsize = 64*1024
-  end
-  attr_accessor :bufsize
-
-  # Closes the file associated with the LightCsv object.
-  def close()
-    @file.close if @file
-  end
-
-  # Returns one record, or nil at the end of the data.
-  # Returns an empty array ([]) for an empty line.
-  # An empty column becomes an empty string ("") whether or not it is enclosed in '"'.
-  def shift()
-    return nil if @ss.eos? and ! read_next_data
-    cols = []
-    while true
-      if @ss.eos? and ! read_next_data
-        cols << ""
-        break
-      end
-      if @ss.scan(/\"/n)
-        until @ss.scan(/(?:\"\"|[^\"])*\"/n)
-          read_next_data or raise InvalidFormat, @ss.rest[0,10]
-        end
-        cols << @ss.matched.chop.gsub(/\"\"/n, '"')
-      else
-        col = @ss.scan(/[^\",\r\n]*/n)
-        while @ss.eos? and read_next_data
-          col << @ss.scan(/[^\",\r\n]*/n)
-        end
-        cols << col
-      end
-      unless @ss.scan(/,/n)
-        break if @ss.scan(/\r\n/n)
-        unless @ss.rest_size < 2 and read_next_data and @ss.scan(/,/n)
-          break if @ss.scan(/\r\n|\n|\r|\z/n)
-          read_next_data
-          raise InvalidFormat, @ss.rest[0,10]
-        end
-      end
-    end
-    cols.clear if cols.size == 1 and cols.first.empty?
-    cols
-  end
-
-  # Iterates the block over each record.
-  def each()
-    while row = shift
-      yield row
-    end
-  end
-
-  # Returns an array of the records from the current position onward.
-  def readlines()
-    return map
-  end
-
-  private
-
-  def read_next_data()
-    if @file and @file.read(@bufsize, @buf)
-      @ss.string = @ss.rest + @buf
-    else
-      nil
-    end
-  end
-end