ruby-prof 1.4.5-x64-mingw-ucrt → 1.5.0-x64-mingw-ucrt
- checksums.yaml +4 -4
- data/CHANGES +6 -0
- data/Rakefile +1 -1
- data/ext/ruby_prof/rp_allocation.c +4 -4
- data/ext/ruby_prof/rp_call_tree.c +126 -36
- data/ext/ruby_prof/rp_call_tree.h +2 -1
- data/ext/ruby_prof/rp_call_trees.c +15 -7
- data/ext/ruby_prof/rp_measurement.c +139 -19
- data/ext/ruby_prof/rp_measurement.h +3 -0
- data/ext/ruby_prof/rp_method.c +58 -6
- data/ext/ruby_prof/rp_method.h +1 -1
- data/ext/ruby_prof/rp_profile.c +36 -3
- data/ext/ruby_prof/rp_thread.c +61 -13
- data/ext/ruby_prof/rp_thread.h +1 -1
- data/ext/ruby_prof/ruby_prof.c +0 -2
- data/ext/ruby_prof/ruby_prof.h +8 -0
- data/ext/ruby_prof/vc/ruby_prof.vcxproj +4 -6
- data/lib/3.1/ruby_prof.so +0 -0
- data/lib/3.2/ruby_prof.so +0 -0
- data/lib/ruby-prof/method_info.rb +8 -1
- data/lib/ruby-prof/printers/call_tree_printer.rb +0 -2
- data/lib/ruby-prof/profile.rb +34 -1
- data/lib/ruby-prof/version.rb +1 -1
- data/test/call_tree_builder.rb +126 -0
- data/test/call_tree_test.rb +197 -0
- data/test/call_trees_test.rb +4 -4
- data/test/fiber_test.rb +123 -1
- data/test/gc_test.rb +9 -7
- data/test/line_number_test.rb +36 -60
- data/test/measurement_test.rb +82 -0
- data/test/method_info_test.rb +95 -0
- data/test/profile_test.rb +85 -0
- data/test/recursive_test.rb +1 -1
- data/test/scheduler.rb +354 -0
- data/test/thread_test.rb +28 -2
- metadata +9 -6
- data/ext/ruby_prof/rp_aggregate_call_tree.c +0 -59
- data/ext/ruby_prof/rp_aggregate_call_tree.h +0 -13
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: dc83e5cdca77c3e07af7ad99f69acfbf8dde2013c435855d996925db9dcd586c
+  data.tar.gz: 9c14d146c1194db666cec9f9ab05e4a798bd0a6ed5c535d850be581ddf8734fa
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 56e6c6a054fed0bdbb08a41ead10898495efb4201c19f2bd3572f3b0ac939e1770c3c4f1bb82da4399ba27f99ed135447e004263d57af86a0888a6a6a349f1fc
+  data.tar.gz: 251eba317457ec6ea583b25656c062f92e8f55a35c7279f39a1f9987234fd9b481fbfd6ab72d71af34541ca49aad2b0feacefbc4d44ca75a4486ec003cb2fa19
data/CHANGES
CHANGED
@@ -1,3 +1,9 @@
+1.5.0 (2023-02-06)
+=====================
+* Add new Profile#merge! method that merges results for threads/fibers that share the same root method (Charlie Savage)
+* Expand API to allow creation of +Measurement+, +CallTree+, +MethodInfo+ and +Thread+ instances. This
+  was done to make it possible to write tests for the new Profile#merge! functionality (Charlie Savage)
+
 1.4.5 (2022-12-27)
 =====================
 * Look for ruby_prof extension built on install in the lib directory (Charlie Savage)
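A quick usage sketch of the new Profile#merge! described in the entry above. This is illustrative only: the implementation lives in data/lib/ruby-prof/profile.rb (its body is not shown on this page), and the no-argument form of merge! is an assumption.

    require 'ruby-prof'

    profile = RubyProf::Profile.new
    profile.start
    workers = 3.times.map { Thread.new { 1_000.times { Object.new } } }
    workers.each(&:join)
    result = profile.stop

    # Assumed no-argument form: fold together the per-thread/fiber results
    # whose call trees start at the same root method.
    result.merge!

    RubyProf::FlatPrinter.new(result).print(STDOUT)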
data/Rakefile
CHANGED
@@ -61,7 +61,7 @@ end
 desc "Generate rdoc documentation"
 RDoc::Task.new("rdoc") do |rdoc|
   rdoc.rdoc_dir = 'doc'
-  rdoc.title
+  rdoc.title = "ruby-prof"
   # Show source inline with line numbers
   rdoc.options << "--line-numbers"
   # Make the readme file the start page for the generated html
data/ext/ruby_prof/rp_allocation.c
CHANGED
@@ -242,13 +242,13 @@ static VALUE prof_allocation_dump(VALUE self)
 
     VALUE result = rb_hash_new();
 
-    rb_hash_aset(result, ID2SYM(rb_intern("key")),
+    rb_hash_aset(result, ID2SYM(rb_intern("key")), ULL2NUM(allocation->key));
     rb_hash_aset(result, ID2SYM(rb_intern("klass_name")), prof_allocation_klass_name(self));
     rb_hash_aset(result, ID2SYM(rb_intern("klass_flags")), INT2FIX(allocation->klass_flags));
     rb_hash_aset(result, ID2SYM(rb_intern("source_file")), allocation->source_file);
     rb_hash_aset(result, ID2SYM(rb_intern("source_line")), INT2FIX(allocation->source_line));
     rb_hash_aset(result, ID2SYM(rb_intern("count")), INT2FIX(allocation->count));
-    rb_hash_aset(result, ID2SYM(rb_intern("memory")),
+    rb_hash_aset(result, ID2SYM(rb_intern("memory")), ULL2NUM(allocation->memory));
 
     return result;
 }
@@ -259,13 +259,13 @@ static VALUE prof_allocation_load(VALUE self, VALUE data)
     prof_allocation_t* allocation = prof_get_allocation(self);
     allocation->object = self;
 
-    allocation->key =
+    allocation->key = RB_NUM2ULL(rb_hash_aref(data, ID2SYM(rb_intern("key"))));
     allocation->klass_name = rb_hash_aref(data, ID2SYM(rb_intern("klass_name")));
     allocation->klass_flags = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("klass_flags"))));
     allocation->source_file = rb_hash_aref(data, ID2SYM(rb_intern("source_file")));
     allocation->source_line = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("source_line"))));
     allocation->count = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("count"))));
-    allocation->memory =
+    allocation->memory = NUM2ULONG(rb_hash_aref(data, ID2SYM(rb_intern("memory"))));
 
     return data;
 }
data/ext/ruby_prof/rp_call_tree.c
CHANGED
@@ -9,6 +9,7 @@ VALUE cRpCallTree;
 prof_call_tree_t* prof_call_tree_create(prof_method_t* method, prof_call_tree_t* parent, VALUE source_file, int source_line)
 {
     prof_call_tree_t* result = ALLOC(prof_call_tree_t);
+    result->owner = OWNER_C;
     result->method = method;
     result->parent = parent;
     result->object = Qnil;
@@ -23,34 +24,12 @@ prof_call_tree_t* prof_call_tree_create(prof_method_t* method, prof_call_tree_t*
 
 prof_call_tree_t* prof_call_tree_copy(prof_call_tree_t* other)
 {
-    prof_call_tree_t* result =
-    result->
-    result->object = Qnil;
-    result->visits = 0;
-
-    result->method = other->method;
-    result->parent = other->parent;
-    result->source_line = other->source_line;
-    result->source_file = other->source_file;
-
-    result->measurement = prof_measurement_create();
-    result->measurement->called = other->measurement->called;
-    result->measurement->total_time = other->measurement->total_time;
-    result->measurement->self_time = other->measurement->self_time;
-    result->measurement->wait_time = other->measurement->wait_time;
-    result->measurement->object = Qnil;
+    prof_call_tree_t* result = prof_call_tree_create(other->method, other->parent, other->source_file, other->source_line);
+    result->measurement = prof_measurement_copy(other->measurement);
 
     return result;
 }
 
-void prof_call_tree_merge(prof_call_tree_t* result, prof_call_tree_t* other)
-{
-    result->measurement->called += other->measurement->called;
-    result->measurement->total_time += other->measurement->total_time;
-    result->measurement->self_time += other->measurement->self_time;
-    result->measurement->wait_time += other->measurement->wait_time;
-}
-
 static int prof_call_tree_collect_children(st_data_t key, st_data_t value, st_data_t result)
 {
     prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
@@ -89,15 +68,6 @@ void prof_call_tree_mark(void* data)
     rb_st_foreach(call_tree->children, prof_call_tree_mark_children, 0);
 }
 
-static void prof_call_tree_ruby_gc_free(void* data)
-{
-    if (data)
-    {
-        prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
-        call_tree->object = Qnil;
-    }
-}
-
 static int prof_call_tree_free_children(st_data_t key, st_data_t value, st_data_t data)
 {
     prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
@@ -126,6 +96,27 @@ void prof_call_tree_free(prof_call_tree_t* call_tree_data)
     xfree(call_tree_data);
 }
 
+static void prof_call_tree_ruby_gc_free(void* data)
+{
+    prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+
+    if (!call_tree)
+    {
+        // Object has already been freed by C code
+        return;
+    }
+    else if (call_tree->owner == OWNER_RUBY)
+    {
+        // Ruby owns this object, we need to free the underlying C struct
+        prof_call_tree_free(call_tree);
+    }
+    else
+    {
+        // The Ruby object is being freed, but not the underlying C structure. So unlink the two.
+        call_tree->object = Qnil;
+    }
+}
+
 size_t prof_call_tree_size(const void* data)
 {
     return sizeof(prof_call_tree_t);
@@ -156,6 +147,8 @@ VALUE prof_call_tree_wrap(prof_call_tree_t* call_tree)
 static VALUE prof_call_tree_allocate(VALUE klass)
 {
     prof_call_tree_t* call_tree = prof_call_tree_create(NULL, NULL, Qnil, 0);
+    // This object is being created by Ruby
+    call_tree->owner = OWNER_RUBY;
     call_tree->object = prof_call_tree_wrap(call_tree);
     return call_tree->object;
 }
@@ -213,10 +206,26 @@ void prof_call_tree_add_parent(prof_call_tree_t* self, prof_call_tree_t* parent)
 void prof_call_tree_add_child(prof_call_tree_t* self, prof_call_tree_t* child)
 {
     call_tree_table_insert(self->children, child->method->key, child);
+
+    // The child is now managed by C since its parent will free it
+    child->owner = OWNER_C;
 }
 
 /* ======= RubyProf::CallTree ========*/
 
+/* call-seq:
+     new(method_info) -> call_tree
+
+   Creates a new CallTree instance. +method_info+ should be a RubyProf::MethodInfo
+   instance identifying the method this call tree node represents. */
+static VALUE prof_call_tree_initialize(VALUE self, VALUE method_info)
+{
+    prof_call_tree_t* call_tree_ptr = prof_get_call_tree(self);
+    call_tree_ptr->method = prof_get_method(method_info);
+
+    return self;
+}
+
 /* call-seq:
      parent -> call_tree
 
@@ -242,6 +251,29 @@ static VALUE prof_call_tree_children(VALUE self)
     return result;
 }
 
+/* call-seq:
+     add_child(call_tree) -> call_tree
+
+   Adds the specified call_tree as a child. If the method represented by the call tree
+   is already a child, then an IndexError is raised.
+
+   The returned value is the added child. */
+static VALUE prof_call_tree_add_child_ruby(VALUE self, VALUE child)
+{
+    prof_call_tree_t* parent_ptr = prof_get_call_tree(self);
+    prof_call_tree_t* child_ptr = prof_get_call_tree(child);
+
+    prof_call_tree_t* existing_ptr = call_tree_table_lookup(parent_ptr->children, child_ptr->method->key);
+    if (existing_ptr)
+    {
+        rb_raise(rb_eIndexError, "Child call tree already exists");
+    }
+
+    prof_call_tree_add_parent(child_ptr, parent_ptr);
+
+    return child;
+}
+
 /* call-seq:
      called -> MethodInfo
 
@@ -294,12 +326,65 @@ static VALUE prof_call_tree_line(VALUE self)
     return INT2FIX(result->source_line);
 }
 
+static int prof_call_tree_merge_children(st_data_t key, st_data_t value, st_data_t data)
+{
+    prof_call_tree_t* other_child = (prof_call_tree_t*)value;
+    prof_call_tree_t* self = (prof_call_tree_t*)data;
+
+    st_data_t self_child;
+    if (rb_st_lookup(self->children, other_child->method->key, &self_child))
+    {
+        prof_call_tree_merge_internal((prof_call_tree_t*)self_child, other_child);
+    }
+    else
+    {
+        prof_call_tree_t* copy = prof_call_tree_copy(other_child);
+        prof_call_tree_add_child(self, copy);
+    }
+    return ST_CONTINUE;
+}
+
+void prof_call_tree_merge_internal(prof_call_tree_t* self, prof_call_tree_t* other)
+{
+    // Make sure the methods are the same
+    if (self->method->key != other->method->key)
+        return;
+
+    // Make sure the parents are the same.
+    // 1. They can both be set and be equal
+    // 2. They can both be unset (null)
+    if (self->parent && other->parent)
+    {
+        if (self->parent->method->key != other->parent->method->key)
+            return;
+    }
+    else if (self->parent || other->parent)
+    {
+        return;
+    }
+
+    prof_measurement_merge_internal(self->measurement, other->measurement);
+    prof_measurement_merge_internal(self->method->measurement, other->method->measurement);
+
+    rb_st_foreach(other->children, prof_call_tree_merge_children, (st_data_t)self);
+}
+
+VALUE prof_call_tree_merge(VALUE self, VALUE other)
+{
+    prof_call_tree_t* source = prof_get_call_tree(self);
+    prof_call_tree_t* destination = prof_get_call_tree(other);
+    prof_call_tree_merge_internal(source, destination);
+    return other;
+}
+
 /* :nodoc: */
 static VALUE prof_call_tree_dump(VALUE self)
 {
     prof_call_tree_t* call_tree_data = prof_get_call_tree(self);
     VALUE result = rb_hash_new();
 
+    rb_hash_aset(result, ID2SYM(rb_intern("owner")), INT2FIX(call_tree_data->owner));
+
     rb_hash_aset(result, ID2SYM(rb_intern("measurement")), prof_measurement_wrap(call_tree_data->measurement));
 
     rb_hash_aset(result, ID2SYM(rb_intern("source_file")), call_tree_data->source_file);
@@ -320,6 +405,8 @@ static VALUE prof_call_tree_load(VALUE self, VALUE data)
     prof_call_tree_t* call_tree = prof_get_call_tree(self);
     call_tree->object = self;
 
+    call_tree->owner = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("owner"))));
+
     VALUE measurement = rb_hash_aref(data, ID2SYM(rb_intern("measurement")));
     call_tree->measurement = prof_get_measurement(measurement);
 
@@ -350,18 +437,21 @@ void rp_init_call_tree()
 {
     /* CallTree */
     cRpCallTree = rb_define_class_under(mProf, "CallTree", rb_cObject);
-    rb_undef_method(CLASS_OF(cRpCallTree), "new");
     rb_define_alloc_func(cRpCallTree, prof_call_tree_allocate);
+    rb_define_method(cRpCallTree, "initialize", prof_call_tree_initialize, 1);
 
-    rb_define_method(cRpCallTree, "parent", prof_call_tree_parent, 0);
-    rb_define_method(cRpCallTree, "children", prof_call_tree_children, 0);
     rb_define_method(cRpCallTree, "target", prof_call_tree_target, 0);
     rb_define_method(cRpCallTree, "measurement", prof_call_tree_measurement, 0);
+    rb_define_method(cRpCallTree, "parent", prof_call_tree_parent, 0);
+    rb_define_method(cRpCallTree, "children", prof_call_tree_children, 0);
+    rb_define_method(cRpCallTree, "add_child", prof_call_tree_add_child_ruby, 1);
 
     rb_define_method(cRpCallTree, "depth", prof_call_tree_depth, 0);
     rb_define_method(cRpCallTree, "source_file", prof_call_tree_source_file, 0);
     rb_define_method(cRpCallTree, "line", prof_call_tree_line, 0);
 
+    rb_define_method(cRpCallTree, "merge!", prof_call_tree_merge, 1);
+
     rb_define_method(cRpCallTree, "_dump_data", prof_call_tree_dump, 0);
     rb_define_method(cRpCallTree, "_load_data", prof_call_tree_load, 1);
 }
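For orientation, a minimal Ruby sketch of the CallTree API registered above (initialize, add_child, merge!). RubyProf::MethodInfo.new(klass, method_name) is assumed here since rp_method.c is not shown on this page, and Array#map / Array#each are just placeholder methods:

    require 'ruby-prof'

    # Assumed MethodInfo constructor; see data/ext/ruby_prof/rp_method.c in 1.5.0.
    map_info  = RubyProf::MethodInfo.new(Array, :map)
    each_info = RubyProf::MethodInfo.new(Array, :each)

    root  = RubyProf::CallTree.new(map_info)
    child = RubyProf::CallTree.new(each_info)

    root.add_child(child)   # returns child; raises IndexError if Array#each is already a child

    # merge! folds another tree's measurements into the receiver and recurses into
    # children, but only when both trees represent the same method (and parent).
    other = RubyProf::CallTree.new(RubyProf::MethodInfo.new(Array, :map))
    root.merge!(other)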
data/ext/ruby_prof/rp_call_tree.h
CHANGED
@@ -13,6 +13,7 @@ extern VALUE cRpCallTree;
 /* Callers and callee information for a method. */
 typedef struct prof_call_tree_t
 {
+    prof_owner_t owner;
     prof_method_t* method;
     struct prof_call_tree_t* parent;
     st_table* children; /* Call infos that this call info calls */
@@ -27,7 +28,7 @@ typedef struct prof_call_tree_t
 
 prof_call_tree_t* prof_call_tree_create(prof_method_t* method, prof_call_tree_t* parent, VALUE source_file, int source_line);
 prof_call_tree_t* prof_call_tree_copy(prof_call_tree_t* other);
-void
+void prof_call_tree_merge_internal(prof_call_tree_t* destination, prof_call_tree_t* other);
 void prof_call_tree_mark(void* data);
 prof_call_tree_t* call_tree_table_lookup(st_table* table, st_data_t key);
 
data/ext/ruby_prof/rp_call_trees.c
CHANGED
@@ -1,7 +1,6 @@
 /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
    Please see the LICENSE file for copyright and distribution information */
 
-#include "rp_aggregate_call_tree.h"
 #include "rp_call_trees.h"
 #include "rp_measurement.h"
 
@@ -68,11 +67,11 @@ void prof_call_trees_ruby_gc_free(void* data)
     }
 }
 
-static int
+static int prof_call_trees_collect(st_data_t key, st_data_t value, st_data_t data)
 {
     VALUE result = (VALUE)data;
     prof_call_tree_t* call_tree_data = (prof_call_tree_t*)value;
-    VALUE aggregate_call_tree =
+    VALUE aggregate_call_tree = prof_call_tree_wrap(call_tree_data);
     rb_ary_push(result, aggregate_call_tree);
 
     return ST_CONTINUE;
@@ -87,11 +86,16 @@ static int prof_call_trees_collect_callees(st_data_t key, st_data_t value, st_da
 
     if (rb_st_lookup(callers, call_tree_data->method->key, (st_data_t*)&aggregate_call_tree_data))
     {
-
+        prof_measurement_merge_internal(aggregate_call_tree_data->measurement, call_tree_data->measurement);
     }
     else
     {
+        // Copy the call tree so we don't touch the original and give Ruby ownership
+        // of it so that it is freed on GC
        aggregate_call_tree_data = prof_call_tree_copy(call_tree_data);
+        aggregate_call_tree_data->owner = OWNER_RUBY;
+
+
         rb_st_insert(callers, call_tree_data->method->key, (st_data_t)aggregate_call_tree_data);
     }
 
@@ -210,17 +214,21 @@ VALUE prof_call_trees_callers(VALUE self)
 
         if (rb_st_lookup(callers, parent->method->key, (st_data_t*)&aggregate_call_tree_data))
         {
-
+            prof_measurement_merge_internal(aggregate_call_tree_data->measurement, (*p_call_tree)->measurement);
         }
        else
         {
+            // Copy the call tree so we don't touch the original and give Ruby ownership
+            // of it so that it is freed on GC
            aggregate_call_tree_data = prof_call_tree_copy(*p_call_tree);
+            aggregate_call_tree_data->owner = OWNER_RUBY;
+
             rb_st_insert(callers, parent->method->key, (st_data_t)aggregate_call_tree_data);
         }
     }
 
     VALUE result = rb_ary_new_capa((long)callers->num_entries);
-    rb_st_foreach(callers,
+    rb_st_foreach(callers, prof_call_trees_collect, result);
     rb_st_free_table(callers);
     return result;
 }
@@ -240,7 +248,7 @@ VALUE prof_call_trees_callees(VALUE self)
     }
 
     VALUE result = rb_ary_new_capa((long)callees->num_entries);
-    rb_st_foreach(callees,
+    rb_st_foreach(callees, prof_call_trees_collect, result);
     rb_st_free_table(callees);
     return result;
 }
data/ext/ruby_prof/rp_measurement.c
CHANGED
@@ -43,6 +43,7 @@ double prof_measure(prof_measurer_t* measurer, rb_trace_arg_t* trace_arg)
 prof_measurement_t* prof_measurement_create(void)
 {
     prof_measurement_t* result = ALLOC(prof_measurement_t);
+    result->owner = OWNER_C;
     result->total_time = 0;
     result->self_time = 0;
     result->wait_time = 0;
@@ -51,6 +52,52 @@ prof_measurement_t* prof_measurement_create(void)
     return result;
 }
 
+/* call-seq:
+     new(total_time, self_time, wait_time, called) -> Measurement
+
+   Creates a new Measurement instance. */
+static VALUE prof_measurement_initialize(VALUE self, VALUE total_time, VALUE self_time, VALUE wait_time, VALUE called)
+{
+    prof_measurement_t* result = prof_get_measurement(self);
+
+    result->total_time = NUM2DBL(total_time);
+    result->self_time = NUM2DBL(self_time);
+    result->wait_time = NUM2DBL(wait_time);
+    result->called = NUM2INT(called);
+    result->object = self;
+    return self;
+}
+
+prof_measurement_t* prof_measurement_copy(prof_measurement_t* other)
+{
+    prof_measurement_t* result = prof_measurement_create();
+    result->called = other->called;
+    result->total_time = other->total_time;
+    result->self_time = other->self_time;
+    result->wait_time = other->wait_time;
+
+    return result;
+}
+
+static VALUE prof_measurement_initialize_copy(VALUE self, VALUE other)
+{
+    // This object was created by Ruby either via Measurement#clone or Measurement#dup
+    // and thus prof_measurement_allocate was called so the object is owned by Ruby
+
+    if (self == other)
+        return self;
+
+    prof_measurement_t* self_ptr = prof_get_measurement(self);
+    prof_measurement_t* other_ptr = prof_get_measurement(other);
+
+    self_ptr->called = other_ptr->called;
+    self_ptr->total_time = other_ptr->total_time;
+    self_ptr->self_time = other_ptr->self_time;
+    self_ptr->wait_time = other_ptr->wait_time;
+
+    return self;
+}
+
 void prof_measurement_mark(void* data)
 {
     if (!data) return;
@@ -61,16 +108,6 @@ void prof_measurement_mark(void* data)
     rb_gc_mark(measurement_data->object);
 }
 
-static void prof_measurement_ruby_gc_free(void* data)
-{
-    if (data)
-    {
-        // Measurements are freed by their owning object (call info or method)
-        prof_measurement_t* measurement = (prof_measurement_t*)data;
-        measurement->object = Qnil;
-    }
-}
-
 void prof_measurement_free(prof_measurement_t* measurement)
 {
     /* Has this measurement object been accessed by Ruby? If
@@ -84,6 +121,27 @@ void prof_measurement_free(prof_measurement_t* measurement)
     xfree(measurement);
 }
 
+static void prof_measurement_ruby_gc_free(void* data)
+{
+    prof_measurement_t* measurement = (prof_measurement_t*)data;
+
+    if (!measurement)
+    {
+        // Object has already been freed by C code
+        return;
+    }
+    else if (measurement->owner == OWNER_RUBY)
+    {
+        // Ruby owns this object, we need to free the underlying C struct
+        prof_measurement_free(measurement);
+    }
+    else
+    {
+        // The Ruby object is being freed, but not the underlying C structure. So unlink the two.
+        measurement->object = Qnil;
+    }
+}
+
 size_t prof_measurement_size(const void* data)
 {
     return sizeof(prof_measurement_t);
@@ -114,6 +172,8 @@ VALUE prof_measurement_wrap(prof_measurement_t* measurement)
 static VALUE prof_measurement_allocate(VALUE klass)
 {
     prof_measurement_t* measurement = prof_measurement_create();
+    // This object is being created by Ruby
+    measurement->owner = OWNER_RUBY;
     measurement->object = prof_measurement_wrap(measurement);
     return measurement->object;
 }
@@ -140,6 +200,17 @@ static VALUE prof_measurement_total_time(VALUE self)
     return rb_float_new(result->total_time);
 }
 
+/* call-seq:
+     total_time=value -> value
+
+   Sets the total time to value. */
+static VALUE prof_measurement_set_total_time(VALUE self, VALUE value)
+{
+    prof_measurement_t* result = prof_get_measurement(self);
+    result->total_time = NUM2DBL(value);
+    return value;
+}
+
 /* call-seq:
      self_time -> float
 
@@ -152,6 +223,17 @@ prof_measurement_self_time(VALUE self)
     return rb_float_new(result->self_time);
 }
 
+/* call-seq:
+     self_time=value -> value
+
+   Sets the self time to value. */
+static VALUE prof_measurement_set_self_time(VALUE self, VALUE value)
+{
+    prof_measurement_t* result = prof_get_measurement(self);
+    result->self_time = NUM2DBL(value);
+    return value;
+}
+
 /* call-seq:
      wait_time -> float
 
@@ -163,6 +245,17 @@ static VALUE prof_measurement_wait_time(VALUE self)
     return rb_float_new(result->wait_time);
 }
 
+/* call-seq:
+     wait_time=value -> value
+
+   Sets the wait time to value. */
+static VALUE prof_measurement_set_wait_time(VALUE self, VALUE value)
+{
+    prof_measurement_t* result = prof_get_measurement(self);
+    result->wait_time = NUM2DBL(value);
+    return value;
+}
+
 /* call-seq:
     called -> int
 
@@ -174,23 +267,44 @@ static VALUE prof_measurement_called(VALUE self)
 }
 
 /* call-seq:
-     called=
+     called=value -> value
 
-   Sets the call count to
-static VALUE prof_measurement_set_called(VALUE self, VALUE
+   Sets the call count to value. */
+static VALUE prof_measurement_set_called(VALUE self, VALUE value)
 {
-
-
-
+    prof_measurement_t* result = prof_get_measurement(self);
+    result->called = NUM2INT(value);
+    return value;
 }
 
 /* :nodoc: */
-
-
+void prof_measurement_merge_internal(prof_measurement_t* self, prof_measurement_t* other)
+{
+    self->called += other->called;
+    self->total_time += other->total_time;
+    self->self_time += other->self_time;
+    self->wait_time += other->wait_time;
+}
+
+/* call-seq:
+     merge!(other) -> self
+
+   Adds the contents of the other measurement to this measurement. */
+VALUE prof_measurement_merge(VALUE self, VALUE other)
+{
+    prof_measurement_t* self_ptr = prof_get_measurement(self);
+    prof_measurement_t* other_ptr = prof_get_measurement(other);
+    prof_measurement_merge_internal(self_ptr, other_ptr);
+    return self;
+}
+
+/* :nodoc: */
+static VALUE prof_measurement_dump(VALUE self)
 {
     prof_measurement_t* measurement_data = prof_get_measurement(self);
     VALUE result = rb_hash_new();
 
+    rb_hash_aset(result, ID2SYM(rb_intern("owner")), INT2FIX(measurement_data->owner));
     rb_hash_aset(result, ID2SYM(rb_intern("total_time")), rb_float_new(measurement_data->total_time));
     rb_hash_aset(result, ID2SYM(rb_intern("self_time")), rb_float_new(measurement_data->self_time));
     rb_hash_aset(result, ID2SYM(rb_intern("wait_time")), rb_float_new(measurement_data->wait_time));
@@ -206,6 +320,7 @@ prof_measurement_load(VALUE self, VALUE data)
     prof_measurement_t* measurement = prof_get_measurement(self);
     measurement->object = self;
 
+    measurement->owner = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("owner"))));
     measurement->total_time = rb_num2dbl(rb_hash_aref(data, ID2SYM(rb_intern("total_time"))));
     measurement->self_time = rb_num2dbl(rb_hash_aref(data, ID2SYM(rb_intern("self_time"))));
     measurement->wait_time = rb_num2dbl(rb_hash_aref(data, ID2SYM(rb_intern("wait_time"))));
@@ -223,14 +338,19 @@ void rp_init_measure()
     rp_init_measure_memory();
 
     cRpMeasurement = rb_define_class_under(mProf, "Measurement", rb_cObject);
-    rb_undef_method(CLASS_OF(cRpMeasurement), "new");
     rb_define_alloc_func(cRpMeasurement, prof_measurement_allocate);
 
+    rb_define_method(cRpMeasurement, "initialize", prof_measurement_initialize, 4);
+    rb_define_method(cRpMeasurement, "initialize_copy", prof_measurement_initialize_copy, 1);
+    rb_define_method(cRpMeasurement, "merge!", prof_measurement_merge, 1);
     rb_define_method(cRpMeasurement, "called", prof_measurement_called, 0);
     rb_define_method(cRpMeasurement, "called=", prof_measurement_set_called, 1);
     rb_define_method(cRpMeasurement, "total_time", prof_measurement_total_time, 0);
+    rb_define_method(cRpMeasurement, "total_time=", prof_measurement_set_total_time, 1);
     rb_define_method(cRpMeasurement, "self_time", prof_measurement_self_time, 0);
+    rb_define_method(cRpMeasurement, "self_time=", prof_measurement_set_self_time, 1);
     rb_define_method(cRpMeasurement, "wait_time", prof_measurement_wait_time, 0);
+    rb_define_method(cRpMeasurement, "wait_time=", prof_measurement_set_wait_time, 1);
 
     rb_define_method(cRpMeasurement, "_dump_data", prof_measurement_dump, 0);
     rb_define_method(cRpMeasurement, "_load_data", prof_measurement_load, 1);
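The registrations above make Measurement instances constructible and mutable from Ruby. A small sketch of the resulting API, with the argument order taken from the call-seq comment (total_time, self_time, wait_time, called):

    require 'ruby-prof'

    a = RubyProf::Measurement.new(1.5, 1.0, 0.25, 3)  # total_time, self_time, wait_time, called
    b = a.dup                                         # initialize_copy copies all four fields

    b.total_time = 2.0
    b.called     = 1

    a.merge!(b)     # adds b's times and call count into a
    a.total_time    # => 3.5
    a.called        # => 4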
data/ext/ruby_prof/rp_measurement.h
CHANGED
@@ -29,6 +29,7 @@ typedef struct prof_measurer_t
 /* Callers and callee information for a method. */
 typedef struct prof_measurement_t
 {
+    prof_owner_t owner;
     double total_time;
     double self_time;
     double wait_time;
@@ -40,10 +41,12 @@ prof_measurer_t* prof_measurer_create(prof_measure_mode_t measure, bool track_al
 double prof_measure(prof_measurer_t* measurer, rb_trace_arg_t* trace_arg);
 
 prof_measurement_t* prof_measurement_create(void);
+prof_measurement_t* prof_measurement_copy(prof_measurement_t* other);
 void prof_measurement_free(prof_measurement_t* measurement);
 VALUE prof_measurement_wrap(prof_measurement_t* measurement);
 prof_measurement_t* prof_get_measurement(VALUE self);
 void prof_measurement_mark(void* data);
+void prof_measurement_merge_internal(prof_measurement_t* destination, prof_measurement_t* other);
 
 void rp_init_measure(void);
 