ruby-prof 1.6.3-x64-mingw-ucrt → 1.7.0-x64-mingw-ucrt
- checksums.yaml +4 -4
- data/CHANGES +7 -0
- data/ext/ruby_prof/rp_allocation.c +342 -342
- data/ext/ruby_prof/rp_call_tree.c +1 -1
- data/ext/ruby_prof/rp_call_tree.h +1 -1
- data/ext/ruby_prof/rp_call_trees.c +2 -2
- data/ext/ruby_prof/rp_call_trees.h +2 -2
- data/ext/ruby_prof/rp_measure_allocations.c +1 -1
- data/ext/ruby_prof/rp_measure_memory.c +46 -46
- data/ext/ruby_prof/rp_measure_process_time.c +1 -1
- data/ext/ruby_prof/rp_measure_wall_time.c +1 -1
- data/ext/ruby_prof/rp_measurement.c +364 -364
- data/ext/ruby_prof/rp_method.c +24 -12
- data/ext/ruby_prof/rp_method.h +5 -2
- data/ext/ruby_prof/rp_profile.c +2 -2
- data/ext/ruby_prof/rp_profile.h +36 -36
- data/ext/ruby_prof/rp_stack.c +1 -1
- data/ext/ruby_prof/rp_thread.c +1 -1
- data/ext/ruby_prof/ruby_prof.c +1 -1
- data/ext/ruby_prof/ruby_prof.h +34 -34
- data/ext/ruby_prof/vc/ruby_prof.vcxproj +5 -7
- data/lib/3.2/ruby_prof.so +0 -0
- data/lib/3.3/ruby_prof.so +0 -0
- data/lib/ruby-prof/compatibility.rb +10 -10
- data/lib/ruby-prof/exclude_common_methods.rb +9 -3
- data/lib/ruby-prof/method_info.rb +87 -85
- data/lib/ruby-prof/version.rb +1 -1
- data/ruby-prof.gemspec +1 -1
- data/test/crash2.rb +144 -0
- data/test/enumerable_test.rb +5 -5
- data/test/exclude_methods_test.rb +197 -86
- data/test/line_number_test.rb +254 -99
- data/test/measure_allocations_test.rb +422 -1
- data/test/measure_memory_test.rb +433 -1
- data/test/measure_process_time_test.rb +882 -15
- data/test/measure_wall_time_test.rb +195 -47
- data/test/method_info_test.rb +1 -1
- data/test/recursive_test.rb +198 -1
- data/test/thread_test.rb +0 -4
- metadata +7 -6
- data/lib/3.1/ruby_prof.so +0 -0
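Most of the churn in this release sits in the allocation and memory measurement paths and their tests (measure_allocations_test.rb, measure_memory_test.rb, measure_process_time_test.rb). For orientation only, here is a minimal sketch of driving those measure modes from Ruby; it assumes the standard RubyProf::Profile and printer API and is not code taken from this diff:

require "ruby-prof"

# Pick the measure mode the expanded tests exercise:
# WALL_TIME (default), PROCESS_TIME, ALLOCATIONS or MEMORY.
profile = RubyProf::Profile.new(measure_mode: RubyProf::ALLOCATIONS)

result = profile.profile do
  10_000.times { |i| i.to_s }   # workload to be measured
end

# Print a flat report of the collected measurements.
RubyProf::FlatPrinter.new(result).print($stdout)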
data/ext/ruby_prof/rp_allocation.c: the gem diff removes and re-adds all 342 lines; apart from the prof_allocations_create signature (marked with - and + below), the source text is the same in 1.6.3 and 1.7.0, so the file is shown once.

@@ -1,342 +1,342 @@
 /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
    Please see the LICENSE file for copyright and distribution information */
 
 #include "rp_allocation.h"
 #include "rp_method.h"
 
 VALUE cRpAllocation;
 
 // ------ prof_allocation_t ------
 prof_allocation_t* prof_allocation_create(void)
 {
     prof_allocation_t* result = ALLOC(prof_allocation_t);
     result->count = 0;
     result->klass = Qnil;
     result->klass_name = Qnil;
     result->object = Qnil;
     result->memory = 0;
     result->source_line = 0;
     result->source_file = Qnil;
     result->key = 0;
 
     return result;
 }
 
 prof_allocation_t* prof_allocation_get(VALUE self)
 {
     /* Can't use Data_Get_Struct because that triggers the event hook
        ending up in endless recursion. */
     prof_allocation_t* result = RTYPEDDATA_DATA(self);
     if (!result)
         rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
 
     return result;
 }
 
 static void prof_allocation_ruby_gc_free(void* data)
 {
     if (data)
     {
         prof_allocation_t* allocation = (prof_allocation_t*)data;
         allocation->object = Qnil;
     }
 }
 
 void prof_allocation_free(prof_allocation_t* allocation)
 {
     /* Has this allocation object been accessed by Ruby? If
        yes clean it up so to avoid a segmentation fault. */
     if (allocation->object != Qnil)
     {
         RTYPEDDATA(allocation->object)->data = NULL;
         allocation->object = Qnil;
     }
 
     xfree(allocation);
 }
 
 size_t prof_allocation_size(const void* data)
 {
     return sizeof(prof_allocation_t);
 }
 
 void prof_allocation_mark(void* data)
 {
     if (!data) return;
 
     prof_allocation_t* allocation = (prof_allocation_t*)data;
     if (allocation->object != Qnil)
         rb_gc_mark_movable(allocation->object);
 
     if (allocation->klass != Qnil)
         rb_gc_mark_movable(allocation->klass);
 
     if (allocation->klass_name != Qnil)
         rb_gc_mark_movable(allocation->klass_name);
 
     if (allocation->source_file != Qnil)
         rb_gc_mark(allocation->source_file);
 }
 
 void prof_allocation_compact(void* data)
 {
     prof_allocation_t* allocation = (prof_allocation_t*)data;
     allocation->object = rb_gc_location(allocation->object);
     allocation->klass = rb_gc_location(allocation->klass);
     allocation->klass_name = rb_gc_location(allocation->klass_name);
 }
 
 static const rb_data_type_t allocation_type =
 {
     .wrap_struct_name = "Allocation",
     .function =
     {
         .dmark = prof_allocation_mark,
         .dfree = prof_allocation_ruby_gc_free,
         .dsize = prof_allocation_size,
         .dcompact = prof_allocation_compact
     },
     .data = NULL,
     .flags = RUBY_TYPED_FREE_IMMEDIATELY
 };
 
 VALUE prof_allocation_wrap(prof_allocation_t* allocation)
 {
     if (allocation->object == Qnil)
     {
         allocation->object = TypedData_Wrap_Struct(cRpAllocation, &allocation_type, allocation);
     }
     return allocation->object;
 }
 
 /* ====== Allocation Table ====== */
-st_table* prof_allocations_create()
+st_table* prof_allocations_create(void)
 {
     return rb_st_init_numtable();
 }
 
 static int allocations_table_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
 {
     prof_allocation_free((prof_allocation_t*)value);
     return ST_CONTINUE;
 }
 
 st_data_t allocations_key(VALUE klass, int source_line)
 {
     return (klass << 4) + source_line;
 }
 
 static int prof_allocations_collect(st_data_t key, st_data_t value, st_data_t result)
 {
     prof_allocation_t* allocation = (prof_allocation_t*)value;
     VALUE arr = (VALUE)result;
     rb_ary_push(arr, prof_allocation_wrap(allocation));
     return ST_CONTINUE;
 }
 
 static int prof_allocations_mark_each(st_data_t key, st_data_t value, st_data_t data)
 {
     prof_allocation_t* allocation = (prof_allocation_t*)value;
     prof_allocation_mark(allocation);
     return ST_CONTINUE;
 }
 
 void prof_allocations_mark(st_table* allocations_table)
 {
     rb_st_foreach(allocations_table, prof_allocations_mark_each, 0);
 }
 
 void prof_allocations_free(st_table* table)
 {
     rb_st_foreach(table, allocations_table_free_iterator, 0);
     rb_st_free_table(table);
 }
 
 prof_allocation_t* allocations_table_lookup(st_table* table, st_data_t key)
 {
     prof_allocation_t* result = NULL;
     st_data_t value;
     if (rb_st_lookup(table, key, &value))
     {
         result = (prof_allocation_t*)value;
     }
 
     return result;
 }
 
 void allocations_table_insert(st_table* table, st_data_t key, prof_allocation_t* allocation)
 {
     rb_st_insert(table, (st_data_t)key, (st_data_t)allocation);
 }
 
 prof_allocation_t* prof_allocate_increment(st_table* allocations_table, rb_trace_arg_t* trace_arg)
 {
     VALUE object = rb_tracearg_object(trace_arg);
     if (BUILTIN_TYPE(object) == T_IMEMO)
         return NULL;
 
     VALUE klass = rb_obj_class(object);
 
     int source_line = FIX2INT(rb_tracearg_lineno(trace_arg));
     st_data_t key = allocations_key(klass, source_line);
 
     prof_allocation_t* allocation = allocations_table_lookup(allocations_table, key);
     if (!allocation)
     {
         allocation = prof_allocation_create();
         allocation->source_line = source_line;
         allocation->source_file = rb_tracearg_path(trace_arg);
         allocation->klass_flags = 0;
         allocation->klass = resolve_klass(klass, &allocation->klass_flags);
 
         allocation->key = key;
         allocations_table_insert(allocations_table, key, allocation);
     }
 
     allocation->count++;
     allocation->memory += rb_obj_memsize_of(object);
 
     return allocation;
 }
 
 // Returns an array of allocations
 VALUE prof_allocations_wrap(st_table* allocations_table)
 {
     VALUE result = rb_ary_new();
     rb_st_foreach(allocations_table, prof_allocations_collect, result);
     return result;
 }
 
 void prof_allocations_unwrap(st_table* allocations_table, VALUE allocations)
 {
     for (int i = 0; i < rb_array_len(allocations); i++)
     {
         VALUE allocation = rb_ary_entry(allocations, i);
         prof_allocation_t* allocation_data = prof_allocation_get(allocation);
         rb_st_insert(allocations_table, allocation_data->key, (st_data_t)allocation_data);
     }
 }
 
 /* ====== prof_allocation_t ====== */
 static VALUE prof_allocation_allocate(VALUE klass)
 {
     prof_allocation_t* allocation = prof_allocation_create();
     allocation->object = prof_allocation_wrap(allocation);
     return allocation->object;
 }
 
 /* call-seq:
      klass -> Class
 
    Returns the type of Class being allocated. */
 static VALUE prof_allocation_klass_name(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
 
     if (allocation->klass_name == Qnil)
         allocation->klass_name = resolve_klass_name(allocation->klass, &allocation->klass_flags);
 
     return allocation->klass_name;
 }
 
 /* call-seq:
      klass_flags -> integer
 
    Returns the klass flags */
 
 static VALUE prof_allocation_klass_flags(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return INT2FIX(allocation->klass_flags);
 }
 
 /* call-seq:
      source_file -> string
 
    Returns the the line number where objects were allocated. */
 static VALUE prof_allocation_source_file(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return allocation->source_file;
 }
 
 /* call-seq:
      line -> number
 
    Returns the the line number where objects were allocated. */
 static VALUE prof_allocation_source_line(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return INT2FIX(allocation->source_line);
 }
 
 /* call-seq:
      count -> number
 
    Returns the number of times this class has been allocated. */
 static VALUE prof_allocation_count(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return INT2FIX(allocation->count);
 }
 
 /* call-seq:
      memory -> number
 
    Returns the amount of memory allocated. */
 static VALUE prof_allocation_memory(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return ULL2NUM(allocation->memory);
 }
 
 /* :nodoc: */
 static VALUE prof_allocation_dump(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
 
     VALUE result = rb_hash_new();
 
     rb_hash_aset(result, ID2SYM(rb_intern("key")), ULL2NUM(allocation->key));
     rb_hash_aset(result, ID2SYM(rb_intern("klass_name")), prof_allocation_klass_name(self));
     rb_hash_aset(result, ID2SYM(rb_intern("klass_flags")), INT2FIX(allocation->klass_flags));
     rb_hash_aset(result, ID2SYM(rb_intern("source_file")), allocation->source_file);
     rb_hash_aset(result, ID2SYM(rb_intern("source_line")), INT2FIX(allocation->source_line));
     rb_hash_aset(result, ID2SYM(rb_intern("count")), INT2FIX(allocation->count));
     rb_hash_aset(result, ID2SYM(rb_intern("memory")), ULL2NUM(allocation->memory));
 
     return result;
 }
 
 /* :nodoc: */
 static VALUE prof_allocation_load(VALUE self, VALUE data)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     allocation->object = self;
 
     allocation->key = RB_NUM2ULL(rb_hash_aref(data, ID2SYM(rb_intern("key"))));
     allocation->klass_name = rb_hash_aref(data, ID2SYM(rb_intern("klass_name")));
     allocation->klass_flags = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("klass_flags"))));
     allocation->source_file = rb_hash_aref(data, ID2SYM(rb_intern("source_file")));
     allocation->source_line = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("source_line"))));
     allocation->count = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("count"))));
     allocation->memory = NUM2ULONG(rb_hash_aref(data, ID2SYM(rb_intern("memory"))));
 
     return data;
 }
 
 void rp_init_allocation(void)
 {
     cRpAllocation = rb_define_class_under(mProf, "Allocation", rb_cObject);
     rb_undef_method(CLASS_OF(cRpAllocation), "new");
     rb_define_alloc_func(cRpAllocation, prof_allocation_allocate);
 
     rb_define_method(cRpAllocation, "klass_name", prof_allocation_klass_name, 0);
     rb_define_method(cRpAllocation, "klass_flags", prof_allocation_klass_flags, 0);
     rb_define_method(cRpAllocation, "source_file", prof_allocation_source_file, 0);
     rb_define_method(cRpAllocation, "line", prof_allocation_source_line, 0);
     rb_define_method(cRpAllocation, "count", prof_allocation_count, 0);
     rb_define_method(cRpAllocation, "memory", prof_allocation_memory, 0);
     rb_define_method(cRpAllocation, "_dump_data", prof_allocation_dump, 0);
     rb_define_method(cRpAllocation, "_load_data", prof_allocation_load, 1);
 }
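The file above implements the RubyProf::Allocation class whose accessors (klass_name, klass_flags, source_file, line, count, memory) are registered in rp_init_allocation. A rough sketch of reading those fields after an allocations-mode profile; the MethodInfo#allocations accessor is assumed from rp_method.c and is not shown in this diff:

require "ruby-prof"

profile = RubyProf::Profile.new(measure_mode: RubyProf::ALLOCATIONS)
result = profile.profile { 1_000.times { String.new("x") } }

result.threads.each do |thread|
  thread.methods.each do |method|
    method.allocations.each do |allocation|
      # Each accessor below is defined in rp_allocation.c above.
      puts format("%s %s:%d count=%d memory=%d",
                  allocation.klass_name, allocation.source_file,
                  allocation.line, allocation.count, allocation.memory)
    end
  end
end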