ruby-prof 1.4.4-x64-mingw-ucrt

Files changed (106)
  1. checksums.yaml +7 -0
  2. data/CHANGES +608 -0
  3. data/LICENSE +25 -0
  4. data/README.md +5 -0
  5. data/Rakefile +98 -0
  6. data/bin/ruby-prof +328 -0
  7. data/bin/ruby-prof-check-trace +45 -0
  8. data/ext/ruby_prof/extconf.rb +22 -0
  9. data/ext/ruby_prof/rp_aggregate_call_tree.c +59 -0
  10. data/ext/ruby_prof/rp_aggregate_call_tree.h +13 -0
  11. data/ext/ruby_prof/rp_allocation.c +287 -0
  12. data/ext/ruby_prof/rp_allocation.h +31 -0
  13. data/ext/ruby_prof/rp_call_tree.c +367 -0
  14. data/ext/ruby_prof/rp_call_tree.h +43 -0
  15. data/ext/ruby_prof/rp_call_trees.c +288 -0
  16. data/ext/ruby_prof/rp_call_trees.h +28 -0
  17. data/ext/ruby_prof/rp_measure_allocations.c +47 -0
  18. data/ext/ruby_prof/rp_measure_memory.c +46 -0
  19. data/ext/ruby_prof/rp_measure_process_time.c +66 -0
  20. data/ext/ruby_prof/rp_measure_wall_time.c +64 -0
  21. data/ext/ruby_prof/rp_measurement.c +237 -0
  22. data/ext/ruby_prof/rp_measurement.h +50 -0
  23. data/ext/ruby_prof/rp_method.c +491 -0
  24. data/ext/ruby_prof/rp_method.h +62 -0
  25. data/ext/ruby_prof/rp_profile.c +915 -0
  26. data/ext/ruby_prof/rp_profile.h +35 -0
  27. data/ext/ruby_prof/rp_stack.c +212 -0
  28. data/ext/ruby_prof/rp_stack.h +53 -0
  29. data/ext/ruby_prof/rp_thread.c +362 -0
  30. data/ext/ruby_prof/rp_thread.h +39 -0
  31. data/ext/ruby_prof/ruby_prof.c +52 -0
  32. data/ext/ruby_prof/ruby_prof.h +26 -0
  33. data/ext/ruby_prof/vc/ruby_prof.sln +39 -0
  34. data/ext/ruby_prof/vc/ruby_prof.vcxproj +160 -0
  35. data/lib/3.1/ruby_prof.so +0 -0
  36. data/lib/ruby-prof/assets/call_stack_printer.html.erb +711 -0
  37. data/lib/ruby-prof/assets/call_stack_printer.png +0 -0
  38. data/lib/ruby-prof/assets/graph_printer.html.erb +355 -0
  39. data/lib/ruby-prof/call_tree.rb +57 -0
  40. data/lib/ruby-prof/call_tree_visitor.rb +36 -0
  41. data/lib/ruby-prof/compatibility.rb +99 -0
  42. data/lib/ruby-prof/exclude_common_methods.rb +198 -0
  43. data/lib/ruby-prof/measurement.rb +17 -0
  44. data/lib/ruby-prof/method_info.rb +78 -0
  45. data/lib/ruby-prof/printers/abstract_printer.rb +137 -0
  46. data/lib/ruby-prof/printers/call_info_printer.rb +53 -0
  47. data/lib/ruby-prof/printers/call_stack_printer.rb +180 -0
  48. data/lib/ruby-prof/printers/call_tree_printer.rb +147 -0
  49. data/lib/ruby-prof/printers/dot_printer.rb +132 -0
  50. data/lib/ruby-prof/printers/flat_printer.rb +53 -0
  51. data/lib/ruby-prof/printers/graph_html_printer.rb +63 -0
  52. data/lib/ruby-prof/printers/graph_printer.rb +113 -0
  53. data/lib/ruby-prof/printers/multi_printer.rb +127 -0
  54. data/lib/ruby-prof/profile.rb +37 -0
  55. data/lib/ruby-prof/rack.rb +95 -0
  56. data/lib/ruby-prof/task.rb +147 -0
  57. data/lib/ruby-prof/thread.rb +20 -0
  58. data/lib/ruby-prof/version.rb +3 -0
  59. data/lib/ruby-prof.rb +52 -0
  60. data/lib/unprof.rb +10 -0
  61. data/ruby-prof.gemspec +64 -0
  62. data/test/abstract_printer_test.rb +26 -0
  63. data/test/alias_test.rb +122 -0
  64. data/test/basic_test.rb +43 -0
  65. data/test/call_tree_visitor_test.rb +32 -0
  66. data/test/call_trees_test.rb +66 -0
  67. data/test/duplicate_names_test.rb +32 -0
  68. data/test/dynamic_method_test.rb +67 -0
  69. data/test/enumerable_test.rb +21 -0
  70. data/test/exceptions_test.rb +24 -0
  71. data/test/exclude_methods_test.rb +151 -0
  72. data/test/exclude_threads_test.rb +53 -0
  73. data/test/fiber_test.rb +129 -0
  74. data/test/gc_test.rb +100 -0
  75. data/test/inverse_call_tree_test.rb +175 -0
  76. data/test/line_number_test.rb +158 -0
  77. data/test/marshal_test.rb +145 -0
  78. data/test/measure_allocations.rb +26 -0
  79. data/test/measure_allocations_test.rb +333 -0
  80. data/test/measure_memory_test.rb +688 -0
  81. data/test/measure_process_time_test.rb +1614 -0
  82. data/test/measure_times.rb +56 -0
  83. data/test/measure_wall_time_test.rb +426 -0
  84. data/test/multi_printer_test.rb +71 -0
  85. data/test/no_method_class_test.rb +15 -0
  86. data/test/pause_resume_test.rb +175 -0
  87. data/test/prime.rb +54 -0
  88. data/test/prime_script.rb +6 -0
  89. data/test/printer_call_stack_test.rb +27 -0
  90. data/test/printer_call_tree_test.rb +30 -0
  91. data/test/printer_flat_test.rb +99 -0
  92. data/test/printer_graph_html_test.rb +59 -0
  93. data/test/printer_graph_test.rb +40 -0
  94. data/test/printers_test.rb +141 -0
  95. data/test/printing_recursive_graph_test.rb +81 -0
  96. data/test/profile_test.rb +16 -0
  97. data/test/rack_test.rb +93 -0
  98. data/test/recursive_test.rb +430 -0
  99. data/test/singleton_test.rb +38 -0
  100. data/test/stack_printer_test.rb +64 -0
  101. data/test/start_stop_test.rb +109 -0
  102. data/test/test_helper.rb +13 -0
  103. data/test/thread_test.rb +144 -0
  104. data/test/unique_call_path_test.rb +136 -0
  105. data/test/yarv_test.rb +60 -0
  106. metadata +187 -0
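
For orientation before the extension sources below, here is a minimal usage sketch of the gem in this listing. It assumes the RubyProf::Profile.profile block API and the FlatPrinter shipped in data/lib/ruby-prof/printers/flat_printer.rb; the workload and option names are illustrative only.

  require 'ruby-prof'

  # Profile a block of Ruby code; wall time is the default measure mode.
  profile = RubyProf::Profile.profile do
    1_000.times { "ruby-prof".upcase }
  end

  # Print a flat report of the collected results to stdout.
  printer = RubyProf::FlatPrinter.new(profile)
  printer.print(STDOUT)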
data/ext/ruby_prof/rp_allocation.c
@@ -0,0 +1,287 @@
+ /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ #include "rp_allocation.h"
+
+ VALUE cRpAllocation;
+
+ prof_allocation_t* allocations_table_lookup(st_table* table, st_data_t key)
+ {
+     prof_allocation_t* result = NULL;
+     st_data_t value;
+     if (rb_st_lookup(table, key, &value))
+     {
+         result = (prof_allocation_t*)value;
+     }
+
+     return result;
+ }
+
+ void allocations_table_insert(st_table* table, st_data_t key, prof_allocation_t* allocation)
+ {
+     rb_st_insert(table, (st_data_t)key, (st_data_t)allocation);
+ }
+
+ st_data_t allocations_key(VALUE klass, int source_line)
+ {
+     return (klass << 4) + source_line;
+ }
+
+ /* ====== prof_allocation_t ====== */
+ prof_allocation_t* prof_allocation_create(void)
+ {
+     prof_allocation_t* result = ALLOC(prof_allocation_t);
+     result->count = 0;
+     result->klass = Qnil;
+     result->klass_name = Qnil;
+     result->object = Qnil;
+     result->memory = 0;
+     result->source_line = 0;
+     result->source_file = Qnil;
+     result->key = 0;
+
+     return result;
+ }
+
+ prof_allocation_t* prof_get_allocation(VALUE self)
+ {
+     /* Can't use Data_Get_Struct because that triggers the event hook
+        ending up in endless recursion. */
+     prof_allocation_t* result = RTYPEDDATA_DATA(self);
+
+     if (!result)
+         rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
+
+     return result;
+ }
+
+ prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg)
+ {
+     VALUE object = rb_tracearg_object(trace_arg);
+     if (BUILTIN_TYPE(object) == T_IMEMO)
+         return NULL;
+
+     VALUE klass = rb_obj_class(object);
+
+     int source_line = FIX2INT(rb_tracearg_lineno(trace_arg));
+     st_data_t key = allocations_key(klass, source_line);
+
+     prof_allocation_t* allocation = allocations_table_lookup(method->allocations_table, key);
+     if (!allocation)
+     {
+         allocation = prof_allocation_create();
+         allocation->source_line = source_line;
+         allocation->source_file = rb_tracearg_path(trace_arg);
+         allocation->klass_flags = 0;
+         allocation->klass = resolve_klass(klass, &allocation->klass_flags);
+
+         allocation->key = key;
+         allocations_table_insert(method->allocations_table, key, allocation);
+     }
+
+     allocation->count++;
+     allocation->memory += rb_obj_memsize_of(object);
+
+     return allocation;
+ }
+
+ static void prof_allocation_ruby_gc_free(void* data)
+ {
+     if (data)
+     {
+         prof_allocation_t* allocation = (prof_allocation_t*)data;
+         allocation->object = Qnil;
+     }
+ }
+
+ void prof_allocation_free(prof_allocation_t* allocation)
+ {
+     /* Has this allocation object been accessed by Ruby? If
+        yes, clean it up to avoid a segmentation fault. */
+     if (allocation->object != Qnil)
+     {
+         RTYPEDDATA(allocation->object)->data = NULL;
+         allocation->object = Qnil;
+     }
+
+     xfree(allocation);
+ }
+
+ size_t prof_allocation_size(const void* data)
+ {
+     return sizeof(prof_allocation_t);
+ }
+
+ void prof_allocation_mark(void* data)
+ {
+     if (!data) return;
+
+     prof_allocation_t* allocation = (prof_allocation_t*)data;
+     if (allocation->object != Qnil)
+         rb_gc_mark(allocation->object);
+
+     if (allocation->klass != Qnil)
+         rb_gc_mark(allocation->klass);
+
+     if (allocation->klass_name != Qnil)
+         rb_gc_mark(allocation->klass_name);
+
+     if (allocation->source_file != Qnil)
+         rb_gc_mark(allocation->source_file);
+ }
+
+ static const rb_data_type_t allocation_type =
+ {
+     .wrap_struct_name = "Allocation",
+     .function =
+     {
+         .dmark = prof_allocation_mark,
+         .dfree = prof_allocation_ruby_gc_free,
+         .dsize = prof_allocation_size,
+     },
+     .data = NULL,
+     .flags = RUBY_TYPED_FREE_IMMEDIATELY
+ };
+
+ VALUE prof_allocation_wrap(prof_allocation_t* allocation)
+ {
+     if (allocation->object == Qnil)
+     {
+         allocation->object = TypedData_Wrap_Struct(cRpAllocation, &allocation_type, allocation);
+     }
+     return allocation->object;
+ }
+
+ static VALUE prof_allocation_allocate(VALUE klass)
+ {
+     prof_allocation_t* allocation = prof_allocation_create();
+     allocation->object = prof_allocation_wrap(allocation);
+     return allocation->object;
+ }
+
+ prof_allocation_t* prof_allocation_get(VALUE self)
+ {
+     /* Can't use Data_Get_Struct because that triggers the event hook
+        ending up in endless recursion. */
+     prof_allocation_t* result = RTYPEDDATA_DATA(self);
+     if (!result)
+         rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
+
+     return result;
+ }
+
+ /* call-seq:
+      klass_name -> string
+
+    Returns the name of the class being allocated. */
+ static VALUE prof_allocation_klass_name(VALUE self)
+ {
+     prof_allocation_t* allocation = prof_allocation_get(self);
+
+     if (allocation->klass_name == Qnil)
+         allocation->klass_name = resolve_klass_name(allocation->klass, &allocation->klass_flags);
+
+     return allocation->klass_name;
+ }
+
+ /* call-seq:
+      klass_flags -> integer
+
+    Returns the klass flags. */
+
+ static VALUE prof_allocation_klass_flags(VALUE self)
+ {
+     prof_allocation_t* allocation = prof_allocation_get(self);
+     return INT2FIX(allocation->klass_flags);
+ }
+
+ /* call-seq:
+      source_file -> string
+
+    Returns the source file where objects were allocated. */
+ static VALUE prof_allocation_source_file(VALUE self)
+ {
+     prof_allocation_t* allocation = prof_allocation_get(self);
+     return allocation->source_file;
+ }
+
+ /* call-seq:
+      line -> number
+
+    Returns the line number where objects were allocated. */
+ static VALUE prof_allocation_source_line(VALUE self)
+ {
+     prof_allocation_t* allocation = prof_allocation_get(self);
+     return INT2FIX(allocation->source_line);
+ }
+
+ /* call-seq:
+      count -> number
+
+    Returns the number of times this class has been allocated. */
+ static VALUE prof_allocation_count(VALUE self)
+ {
+     prof_allocation_t* allocation = prof_allocation_get(self);
+     return INT2FIX(allocation->count);
+ }
+
+ /* call-seq:
+      memory -> number
+
+    Returns the amount of memory allocated. */
+ static VALUE prof_allocation_memory(VALUE self)
+ {
+     prof_allocation_t* allocation = prof_allocation_get(self);
+     return ULL2NUM(allocation->memory);
+ }
+
+ /* :nodoc: */
+ static VALUE prof_allocation_dump(VALUE self)
+ {
+     prof_allocation_t* allocation = prof_get_allocation(self);
+
+     VALUE result = rb_hash_new();
+
+     rb_hash_aset(result, ID2SYM(rb_intern("key")), INT2FIX(allocation->key));
+     rb_hash_aset(result, ID2SYM(rb_intern("klass_name")), prof_allocation_klass_name(self));
+     rb_hash_aset(result, ID2SYM(rb_intern("klass_flags")), INT2FIX(allocation->klass_flags));
+     rb_hash_aset(result, ID2SYM(rb_intern("source_file")), allocation->source_file);
+     rb_hash_aset(result, ID2SYM(rb_intern("source_line")), INT2FIX(allocation->source_line));
+     rb_hash_aset(result, ID2SYM(rb_intern("count")), INT2FIX(allocation->count));
+     rb_hash_aset(result, ID2SYM(rb_intern("memory")), LONG2FIX(allocation->memory));
+
+     return result;
+ }
+
+ /* :nodoc: */
+ static VALUE prof_allocation_load(VALUE self, VALUE data)
+ {
+     prof_allocation_t* allocation = prof_get_allocation(self);
+     allocation->object = self;
+
+     allocation->key = FIX2LONG(rb_hash_aref(data, ID2SYM(rb_intern("key"))));
+     allocation->klass_name = rb_hash_aref(data, ID2SYM(rb_intern("klass_name")));
+     allocation->klass_flags = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("klass_flags"))));
+     allocation->source_file = rb_hash_aref(data, ID2SYM(rb_intern("source_file")));
+     allocation->source_line = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("source_line"))));
+     allocation->count = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("count"))));
+     allocation->memory = FIX2LONG(rb_hash_aref(data, ID2SYM(rb_intern("memory"))));
+
+     return data;
+ }
+
+ void rp_init_allocation(void)
+ {
+     cRpAllocation = rb_define_class_under(mProf, "Allocation", rb_cObject);
+     rb_undef_method(CLASS_OF(cRpAllocation), "new");
+     rb_define_alloc_func(cRpAllocation, prof_allocation_allocate);
+
+     rb_define_method(cRpAllocation, "klass_name", prof_allocation_klass_name, 0);
+     rb_define_method(cRpAllocation, "klass_flags", prof_allocation_klass_flags, 0);
+     rb_define_method(cRpAllocation, "source_file", prof_allocation_source_file, 0);
+     rb_define_method(cRpAllocation, "line", prof_allocation_source_line, 0);
+     rb_define_method(cRpAllocation, "count", prof_allocation_count, 0);
+     rb_define_method(cRpAllocation, "memory", prof_allocation_memory, 0);
+     rb_define_method(cRpAllocation, "_dump_data", prof_allocation_dump, 0);
+     rb_define_method(cRpAllocation, "_load_data", prof_allocation_load, 1);
+ }
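
The rb_define_method calls in rp_init_allocation above are what expose the collected data to Ruby as RubyProf::Allocation objects with klass_name, klass_flags, source_file, line, count and memory readers. Below is a hedged sketch of reading them, assuming allocations are tracked per method and reachable via RubyProf::MethodInfo#allocations (rp_method.c is part of this listing); the workload is illustrative only.

  require 'ruby-prof'

  # Profile with allocation counting enabled.
  profile = RubyProf::Profile.profile(measure_mode: RubyProf::ALLOCATIONS) do
    10.times { Array.new(100) }
  end

  profile.threads.each do |thread|
    thread.methods.each do |method|
      method.allocations.each do |allocation|
        # Each reader below is defined in rp_init_allocation above.
        puts format('%s allocated %d times (%d bytes) at %s:%d',
                    allocation.klass_name, allocation.count, allocation.memory,
                    allocation.source_file, allocation.line)
      end
    end
  end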
data/ext/ruby_prof/rp_allocation.h
@@ -0,0 +1,31 @@
+ /* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ #ifndef _RP_ALLOCATION_
+ #define _RP_ALLOCATION_
+
+ #include "ruby_prof.h"
+ #include "rp_method.h"
+
+ typedef struct prof_allocation_t
+ {
+     st_data_t key;            /* Key in hash table */
+     unsigned int klass_flags; /* Information about the type of class */
+     VALUE klass;              /* Klass that was created */
+     VALUE klass_name;         /* Name of the class that was created */
+     VALUE source_file;        /* Source file where allocation happens */
+     int source_line;          /* Line number where allocation happens */
+     int count;                /* Number of allocations */
+     size_t memory;            /* Amount of allocated memory */
+     VALUE object;             /* Cache to wrapped object */
+ } prof_allocation_t;
+
+ void rp_init_allocation(void);
+ void prof_allocation_free(prof_allocation_t* allocation);
+ void prof_allocation_mark(void* data);
+ VALUE prof_allocation_wrap(prof_allocation_t* allocation);
+ prof_allocation_t* prof_allocation_get(VALUE self);
+ prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg);
+
+
+ #endif //_RP_ALLOCATION_
data/ext/ruby_prof/rp_call_tree.c
@@ -0,0 +1,367 @@
+ /* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ #include "rp_call_tree.h"
+
+ VALUE cRpCallTree;
+
+ /* ======= prof_call_tree_t ========*/
+ prof_call_tree_t* prof_call_tree_create(prof_method_t* method, prof_call_tree_t* parent, VALUE source_file, int source_line)
+ {
+     prof_call_tree_t* result = ALLOC(prof_call_tree_t);
+     result->method = method;
+     result->parent = parent;
+     result->object = Qnil;
+     result->visits = 0;
+     result->source_line = source_line;
+     result->source_file = source_file;
+     result->children = rb_st_init_numtable();
+     result->measurement = prof_measurement_create();
+
+     return result;
+ }
+
+ prof_call_tree_t* prof_call_tree_copy(prof_call_tree_t* other)
+ {
+     prof_call_tree_t* result = ALLOC(prof_call_tree_t);
+     result->children = rb_st_init_numtable();
+     result->object = Qnil;
+     result->visits = 0;
+
+     result->method = other->method;
+     result->parent = other->parent;
+     result->source_line = other->source_line;
+     result->source_file = other->source_file;
+
+     result->measurement = prof_measurement_create();
+     result->measurement->called = other->measurement->called;
+     result->measurement->total_time = other->measurement->total_time;
+     result->measurement->self_time = other->measurement->self_time;
+     result->measurement->wait_time = other->measurement->wait_time;
+     result->measurement->object = Qnil;
+
+     return result;
+ }
+
+ void prof_call_tree_merge(prof_call_tree_t* result, prof_call_tree_t* other)
+ {
+     result->measurement->called += other->measurement->called;
+     result->measurement->total_time += other->measurement->total_time;
+     result->measurement->self_time += other->measurement->self_time;
+     result->measurement->wait_time += other->measurement->wait_time;
+ }
+
+ static int prof_call_tree_collect_children(st_data_t key, st_data_t value, st_data_t result)
+ {
+     prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
+     VALUE arr = (VALUE)result;
+     rb_ary_push(arr, prof_call_tree_wrap(call_tree));
+     return ST_CONTINUE;
+ }
+
+ static int prof_call_tree_mark_children(st_data_t key, st_data_t value, st_data_t data)
+ {
+     prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
+     rb_st_foreach(call_tree->children, prof_call_tree_mark_children, data);
+     prof_call_tree_mark(call_tree);
+     return ST_CONTINUE;
+ }
+
+ void prof_call_tree_mark(void* data)
+ {
+     if (!data)
+         return;
+
+     prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+
+     if (call_tree->object != Qnil)
+         rb_gc_mark(call_tree->object);
+
+     if (call_tree->source_file != Qnil)
+         rb_gc_mark(call_tree->source_file);
+
+     prof_method_mark(call_tree->method);
+     prof_measurement_mark(call_tree->measurement);
+
+     // Recurse down through the whole call tree but only from the top node
+     // to avoid calling mark over and over and over.
+     if (!call_tree->parent)
+         rb_st_foreach(call_tree->children, prof_call_tree_mark_children, 0);
+ }
+
+ static void prof_call_tree_ruby_gc_free(void* data)
+ {
+     if (data)
+     {
+         prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+         call_tree->object = Qnil;
+     }
+ }
+
+ static int prof_call_tree_free_children(st_data_t key, st_data_t value, st_data_t data)
+ {
+     prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
+     prof_call_tree_free(call_tree);
+     return ST_CONTINUE;
+ }
+
+ void prof_call_tree_free(prof_call_tree_t* call_tree_data)
+ {
+     /* Has this call info object been accessed by Ruby? If
+        yes, clean it up to avoid a segmentation fault. */
+     if (call_tree_data->object != Qnil)
+     {
+         RTYPEDDATA(call_tree_data->object)->data = NULL;
+         call_tree_data->object = Qnil;
+     }
+
+     // Free children
+     rb_st_foreach(call_tree_data->children, prof_call_tree_free_children, 0);
+     rb_st_free_table(call_tree_data->children);
+
+     // Free measurement
+     prof_measurement_free(call_tree_data->measurement);
+
+     // Finally free self
+     xfree(call_tree_data);
+ }
+
+ size_t prof_call_tree_size(const void* data)
+ {
+     return sizeof(prof_call_tree_t);
+ }
+
+ static const rb_data_type_t call_tree_type =
+ {
+     .wrap_struct_name = "CallTree",
+     .function =
+     {
+         .dmark = prof_call_tree_mark,
+         .dfree = prof_call_tree_ruby_gc_free,
+         .dsize = prof_call_tree_size,
+     },
+     .data = NULL,
+     .flags = RUBY_TYPED_FREE_IMMEDIATELY
+ };
+
+ VALUE prof_call_tree_wrap(prof_call_tree_t* call_tree)
+ {
+     if (call_tree->object == Qnil)
+     {
+         call_tree->object = TypedData_Wrap_Struct(cRpCallTree, &call_tree_type, call_tree);
+     }
+     return call_tree->object;
+ }
+
+ static VALUE prof_call_tree_allocate(VALUE klass)
+ {
+     prof_call_tree_t* call_tree = prof_call_tree_create(NULL, NULL, Qnil, 0);
+     call_tree->object = prof_call_tree_wrap(call_tree);
+     return call_tree->object;
+ }
+
+ prof_call_tree_t* prof_get_call_tree(VALUE self)
+ {
+     /* Can't use Data_Get_Struct because that triggers the event hook
+        ending up in endless recursion. */
+     prof_call_tree_t* result = RTYPEDDATA_DATA(self);
+
+     if (!result)
+         rb_raise(rb_eRuntimeError, "This RubyProf::CallTree instance has already been freed, likely because its profile has been freed.");
+
+     return result;
+ }
+
+ /* ======= Call Tree Table ========*/
+ static size_t call_tree_table_insert(st_table* table, st_data_t key, prof_call_tree_t* val)
+ {
+     return rb_st_insert(table, (st_data_t)key, (st_data_t)val);
+ }
+
+ prof_call_tree_t* call_tree_table_lookup(st_table* table, st_data_t key)
+ {
+     st_data_t val;
+     if (rb_st_lookup(table, (st_data_t)key, &val))
+     {
+         return (prof_call_tree_t*)val;
+     }
+     else
+     {
+         return NULL;
+     }
+ }
+
+ uint32_t prof_call_figure_depth(prof_call_tree_t* call_tree_data)
+ {
+     uint32_t result = 0;
+
+     while (call_tree_data->parent)
+     {
+         result++;
+         call_tree_data = call_tree_data->parent;
+     }
+
+     return result;
+ }
+
+ void prof_call_tree_add_parent(prof_call_tree_t* self, prof_call_tree_t* parent)
+ {
+     prof_call_tree_add_child(parent, self);
+     self->parent = parent;
+ }
+
+ void prof_call_tree_add_child(prof_call_tree_t* self, prof_call_tree_t* child)
+ {
+     call_tree_table_insert(self->children, child->method->key, child);
+ }
+
+ /* ======= RubyProf::CallTree ========*/
+
+ /* call-seq:
+      parent -> call_tree
+
+    Returns the parent CallTree object (the method that called this method). */
+ static VALUE prof_call_tree_parent(VALUE self)
+ {
+     prof_call_tree_t* call_tree = prof_get_call_tree(self);
+     if (call_tree->parent)
+         return prof_call_tree_wrap(call_tree->parent);
+     else
+         return Qnil;
+ }
+
+ /* call-seq:
+      children -> array
+
+    Returns an array of CallTree objects that this method called (i.e., its children). */
+ static VALUE prof_call_tree_children(VALUE self)
+ {
+     prof_call_tree_t* call_tree = prof_get_call_tree(self);
+     VALUE result = rb_ary_new();
+     rb_st_foreach(call_tree->children, prof_call_tree_collect_children, result);
+     return result;
+ }
+
+ /* call-seq:
+      target -> MethodInfo
+
+    Returns the target method. */
+ static VALUE prof_call_tree_target(VALUE self)
+ {
+     prof_call_tree_t* call_tree = prof_get_call_tree(self);
+     return prof_method_wrap(call_tree->method);
+ }
+
+ /* call-seq:
+      measurement -> Measurement
+
+    Returns the measurement associated with this call_tree. */
+ static VALUE prof_call_tree_measurement(VALUE self)
+ {
+     prof_call_tree_t* call_tree = prof_get_call_tree(self);
+     return prof_measurement_wrap(call_tree->measurement);
+ }
+
+ /* call-seq:
+      depth -> int
+
+    Returns the depth of this call info in the call graph. */
+ static VALUE prof_call_tree_depth(VALUE self)
+ {
+     prof_call_tree_t* call_tree_data = prof_get_call_tree(self);
+     uint32_t depth = prof_call_figure_depth(call_tree_data);
+     return rb_int_new(depth);
+ }
+
+ /* call-seq:
+      source_file -> string
+
+    Returns the source file of the method. */
+ static VALUE prof_call_tree_source_file(VALUE self)
+ {
+     prof_call_tree_t* result = prof_get_call_tree(self);
+     return result->source_file;
+ }
+
+ /* call-seq:
+      line -> int
+
+    Returns the line number of the method. */
+ static VALUE prof_call_tree_line(VALUE self)
+ {
+     prof_call_tree_t* result = prof_get_call_tree(self);
+     return INT2FIX(result->source_line);
+ }
+
+ /* :nodoc: */
+ static VALUE prof_call_tree_dump(VALUE self)
+ {
+     prof_call_tree_t* call_tree_data = prof_get_call_tree(self);
+     VALUE result = rb_hash_new();
+
+     rb_hash_aset(result, ID2SYM(rb_intern("measurement")), prof_measurement_wrap(call_tree_data->measurement));
+
+     rb_hash_aset(result, ID2SYM(rb_intern("source_file")), call_tree_data->source_file);
+     rb_hash_aset(result, ID2SYM(rb_intern("source_line")), INT2FIX(call_tree_data->source_line));
+
+     rb_hash_aset(result, ID2SYM(rb_intern("parent")), prof_call_tree_parent(self));
+     rb_hash_aset(result, ID2SYM(rb_intern("children")), prof_call_tree_children(self));
+     rb_hash_aset(result, ID2SYM(rb_intern("target")), prof_call_tree_target(self));
+
+     return result;
+ }
+
+ /* :nodoc: */
+ static VALUE prof_call_tree_load(VALUE self, VALUE data)
+ {
+     VALUE target = Qnil;
+     VALUE parent = Qnil;
+     prof_call_tree_t* call_tree = prof_get_call_tree(self);
+     call_tree->object = self;
+
+     VALUE measurement = rb_hash_aref(data, ID2SYM(rb_intern("measurement")));
+     call_tree->measurement = prof_get_measurement(measurement);
+
+     call_tree->source_file = rb_hash_aref(data, ID2SYM(rb_intern("source_file")));
+     call_tree->source_line = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("source_line"))));
+
+     parent = rb_hash_aref(data, ID2SYM(rb_intern("parent")));
+     if (parent != Qnil)
+         call_tree->parent = prof_get_call_tree(parent);
+
+     VALUE callees = rb_hash_aref(data, ID2SYM(rb_intern("children")));
+     for (int i = 0; i < rb_array_len(callees); i++)
+     {
+         VALUE call_tree_object = rb_ary_entry(callees, i);
+         prof_call_tree_t* call_tree_data = prof_get_call_tree(call_tree_object);
+
+         st_data_t key = call_tree_data->method ? call_tree_data->method->key : method_key(Qnil, 0);
+         call_tree_table_insert(call_tree->children, key, call_tree_data);
+     }
+
+     target = rb_hash_aref(data, ID2SYM(rb_intern("target")));
+     call_tree->method = prof_get_method(target);
+
+     return data;
+ }
+
+ void rp_init_call_tree()
+ {
+     /* CallTree */
+     cRpCallTree = rb_define_class_under(mProf, "CallTree", rb_cObject);
+     rb_undef_method(CLASS_OF(cRpCallTree), "new");
+     rb_define_alloc_func(cRpCallTree, prof_call_tree_allocate);
+
+     rb_define_method(cRpCallTree, "parent", prof_call_tree_parent, 0);
+     rb_define_method(cRpCallTree, "children", prof_call_tree_children, 0);
+     rb_define_method(cRpCallTree, "target", prof_call_tree_target, 0);
+     rb_define_method(cRpCallTree, "measurement", prof_call_tree_measurement, 0);
+
+     rb_define_method(cRpCallTree, "depth", prof_call_tree_depth, 0);
+     rb_define_method(cRpCallTree, "source_file", prof_call_tree_source_file, 0);
+     rb_define_method(cRpCallTree, "line", prof_call_tree_line, 0);
+
+     rb_define_method(cRpCallTree, "_dump_data", prof_call_tree_dump, 0);
+     rb_define_method(cRpCallTree, "_load_data", prof_call_tree_load, 1);
+ }
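
rp_init_call_tree above gives each RubyProf::CallTree node parent, children, target, measurement, depth, source_file and line readers. Here is a small sketch of walking a recorded call tree with those readers; it assumes the root node of each profiled thread is reachable via RubyProf::Thread#call_tree (data/lib/ruby-prof/thread.rb in this listing) and that MethodInfo#full_name is available for display, with an illustrative workload.

  require 'ruby-prof'

  profile = RubyProf::Profile.profile { 100.times { [1, 2, 3].map(&:to_s) } }

  # Recursively print each node using the readers defined in rp_init_call_tree.
  def print_call_tree(node, indent = 0)
    info = node.target                  # RubyProf::MethodInfo for this node
    time = node.measurement.total_time  # time spent in this node and its children
    puts format('%s%s (%.6f s)', ' ' * indent, info.full_name, time)
    node.children.each { |child| print_call_tree(child, indent + 2) }
  end

  profile.threads.each { |thread| print_call_tree(thread.call_tree) }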