ruby-prof 1.1.0-x64-mingw32 → 1.4.2-x64-mingw32

This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (91)
  1. checksums.yaml +4 -4
  2. data/CHANGES +48 -1
  3. data/Rakefile +2 -14
  4. data/bin/ruby-prof +100 -152
  5. data/ext/ruby_prof/extconf.rb +8 -28
  6. data/ext/ruby_prof/rp_aggregate_call_tree.c +59 -0
  7. data/ext/ruby_prof/rp_aggregate_call_tree.h +13 -0
  8. data/ext/ruby_prof/rp_allocation.c +67 -59
  9. data/ext/ruby_prof/rp_allocation.h +3 -3
  10. data/ext/ruby_prof/rp_call_tree.c +369 -0
  11. data/ext/ruby_prof/rp_call_tree.h +43 -0
  12. data/ext/ruby_prof/rp_call_trees.c +288 -0
  13. data/ext/ruby_prof/rp_call_trees.h +28 -0
  14. data/ext/ruby_prof/rp_measure_allocations.c +12 -14
  15. data/ext/ruby_prof/rp_measure_process_time.c +12 -14
  16. data/ext/ruby_prof/rp_measure_wall_time.c +17 -15
  17. data/ext/ruby_prof/rp_measurement.c +47 -40
  18. data/ext/ruby_prof/rp_measurement.h +7 -7
  19. data/ext/ruby_prof/rp_method.c +116 -255
  20. data/ext/ruby_prof/rp_method.h +31 -39
  21. data/ext/ruby_prof/rp_profile.c +316 -303
  22. data/ext/ruby_prof/rp_profile.h +1 -3
  23. data/ext/ruby_prof/rp_stack.c +122 -106
  24. data/ext/ruby_prof/rp_stack.h +17 -20
  25. data/ext/ruby_prof/rp_thread.c +136 -111
  26. data/ext/ruby_prof/rp_thread.h +12 -9
  27. data/ext/ruby_prof/ruby_prof.c +27 -23
  28. data/ext/ruby_prof/ruby_prof.h +9 -0
  29. data/ext/ruby_prof/vc/ruby_prof.sln +8 -0
  30. data/ext/ruby_prof/vc/ruby_prof.vcxproj +22 -7
  31. data/lib/2.7/ruby_prof.so +0 -0
  32. data/lib/ruby-prof.rb +5 -5
  33. data/lib/ruby-prof/assets/call_stack_printer.html.erb +4 -7
  34. data/lib/ruby-prof/assets/graph_printer.html.erb +5 -6
  35. data/lib/ruby-prof/{call_info.rb → call_tree.rb} +6 -6
  36. data/lib/ruby-prof/call_tree_visitor.rb +36 -0
  37. data/lib/ruby-prof/compatibility.rb +0 -10
  38. data/lib/ruby-prof/measurement.rb +5 -2
  39. data/lib/ruby-prof/method_info.rb +3 -15
  40. data/lib/ruby-prof/printers/abstract_printer.rb +12 -2
  41. data/lib/ruby-prof/printers/call_info_printer.rb +12 -10
  42. data/lib/ruby-prof/printers/call_stack_printer.rb +20 -22
  43. data/lib/ruby-prof/printers/call_tree_printer.rb +1 -1
  44. data/lib/ruby-prof/printers/dot_printer.rb +3 -3
  45. data/lib/ruby-prof/printers/flat_printer.rb +3 -2
  46. data/lib/ruby-prof/printers/graph_printer.rb +4 -5
  47. data/lib/ruby-prof/printers/multi_printer.rb +2 -2
  48. data/lib/ruby-prof/profile.rb +8 -4
  49. data/lib/ruby-prof/rack.rb +51 -127
  50. data/lib/ruby-prof/thread.rb +3 -18
  51. data/lib/ruby-prof/version.rb +1 -1
  52. data/ruby-prof.gemspec +7 -0
  53. data/test/alias_test.rb +42 -45
  54. data/test/basic_test.rb +0 -86
  55. data/test/{call_info_visitor_test.rb → call_tree_visitor_test.rb} +6 -5
  56. data/test/call_trees_test.rb +66 -0
  57. data/test/exclude_methods_test.rb +17 -12
  58. data/test/fiber_test.rb +95 -39
  59. data/test/gc_test.rb +36 -42
  60. data/test/inverse_call_tree_test.rb +175 -0
  61. data/test/line_number_test.rb +67 -70
  62. data/test/marshal_test.rb +7 -13
  63. data/test/measure_allocations_test.rb +224 -234
  64. data/test/measure_allocations_trace_test.rb +224 -234
  65. data/test/measure_memory_trace_test.rb +814 -469
  66. data/test/measure_process_time_test.rb +0 -64
  67. data/test/measure_times.rb +2 -0
  68. data/test/measure_wall_time_test.rb +34 -58
  69. data/test/pause_resume_test.rb +19 -10
  70. data/test/prime.rb +1 -3
  71. data/test/prime_script.rb +6 -0
  72. data/test/printer_call_stack_test.rb +0 -1
  73. data/test/printer_call_tree_test.rb +0 -1
  74. data/test/printer_flat_test.rb +61 -30
  75. data/test/printer_graph_html_test.rb +0 -1
  76. data/test/printer_graph_test.rb +3 -4
  77. data/test/printers_test.rb +2 -2
  78. data/test/printing_recursive_graph_test.rb +1 -1
  79. data/test/profile_test.rb +16 -0
  80. data/test/rack_test.rb +0 -64
  81. data/test/recursive_test.rb +50 -54
  82. data/test/start_stop_test.rb +19 -19
  83. data/test/test_helper.rb +6 -17
  84. data/test/thread_test.rb +11 -11
  85. data/test/unique_call_path_test.rb +25 -95
  86. metadata +22 -11
  87. data/ext/ruby_prof/rp_call_info.c +0 -271
  88. data/ext/ruby_prof/rp_call_info.h +0 -35
  89. data/lib/2.6.5/ruby_prof.so +0 -0
  90. data/lib/ruby-prof/call_info_visitor.rb +0 -38
  91. data/test/parser_timings.rb +0 -24
data/ext/ruby_prof/rp_aggregate_call_tree.c
@@ -0,0 +1,59 @@
+/* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+   Please see the LICENSE file for copyright and distribution information */
+
+#include "rp_aggregate_call_tree.h"
+
+VALUE cRpAggregateCallTree;
+
+void prof_aggregate_call_tree_mark(void* data)
+{
+    prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+
+    if (call_tree->object != Qnil)
+        rb_gc_mark(call_tree->object);
+
+    if (call_tree->source_file != Qnil)
+        rb_gc_mark(call_tree->source_file);
+
+    prof_measurement_mark(call_tree->measurement);
+}
+
+static void prof_aggregate_call_tree_ruby_gc_free(void* data)
+{
+    prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+    prof_call_tree_free(call_tree);
+}
+
+size_t prof_aggregate_call_tree_size(const void* data)
+{
+    return sizeof(prof_call_tree_t);
+}
+
+static const rb_data_type_t aggregate_call_tree_type =
+{
+    .wrap_struct_name = "Aggregate_CallTree",
+    .function =
+    {
+        .dmark = prof_aggregate_call_tree_mark,
+        .dfree = prof_aggregate_call_tree_ruby_gc_free,
+        .dsize = prof_aggregate_call_tree_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
+VALUE prof_aggregate_call_tree_wrap(prof_call_tree_t* call_tree)
+{
+    if (call_tree->object == Qnil)
+    {
+        call_tree->object = TypedData_Wrap_Struct(cRpAggregateCallTree, &aggregate_call_tree_type, call_tree);
+    }
+    return call_tree->object;
+}
+
+void rp_init_aggregate_call_tree()
+{
+    // AggregateCallTree
+    cRpAggregateCallTree = rb_define_class_under(mProf, "AggregateCallTree", cRpCallTree);
+    rb_undef_method(CLASS_OF(cRpAggregateCallTree), "new");
+}
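The new file above is also the template for a broader change in this release: every wrapped struct moves from the legacy Data_Wrap_Struct/DATA_PTR API to Ruby's typed-data protocol (the rp_allocation.c hunks below make the same switch). The following minimal sketch shows the pattern in isolation; the names (thing_t, thing_wrap, thing_get) are hypothetical, not the gem's:

    #include <ruby.h>

    typedef struct { long count; } thing_t;

    static void thing_ruby_gc_free(void* data)
    {
        xfree(data);
    }

    static size_t thing_memsize(const void* data)
    {
        return sizeof(thing_t);
    }

    /* The rb_data_type_t bundles mark/free/size callbacks that the old
       Data_Wrap_Struct call passed individually. */
    static const rb_data_type_t thing_type =
    {
        .wrap_struct_name = "Thing",
        .function = { .dmark = NULL, .dfree = thing_ruby_gc_free, .dsize = thing_memsize, },
        .data = NULL,
        .flags = RUBY_TYPED_FREE_IMMEDIATELY
    };

    static VALUE thing_wrap(VALUE klass, thing_t* thing)
    {
        return TypedData_Wrap_Struct(klass, &thing_type, thing);
    }

    /* Read the pointer back without type checks or hooks; NULL means the
       owner already freed the struct. */
    static thing_t* thing_get(VALUE self)
    {
        thing_t* result = RTYPEDDATA_DATA(self);
        if (!result)
            rb_raise(rb_eRuntimeError, "thing_t already freed");
        return result;
    }

RubyProf::Allocation below gets the same treatment, including moving its superclass from rb_cData (deprecated and later removed from Ruby) to rb_cObject.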
data/ext/ruby_prof/rp_aggregate_call_tree.h
@@ -0,0 +1,13 @@
+/* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+   Please see the LICENSE file for copyright and distribution information */
+
+#ifndef __RP_AGGREGATE_CALL_TREE_H__
+#define __RP_AGGREGATE_CALL_TREE_H__
+
+#include "ruby_prof.h"
+#include "rp_call_tree.h"
+
+void rp_init_aggregate_call_tree(void);
+VALUE prof_aggregate_call_tree_wrap(prof_call_tree_t* call_tree);
+
+#endif //__RP_AGGREGATE_CALL_TREE_H__
data/ext/ruby_prof/rp_allocation.c
@@ -5,12 +5,11 @@
 
 VALUE cRpAllocation;
 
-prof_allocation_t*
-allocations_table_lookup(st_table *table, st_data_t key)
+prof_allocation_t* allocations_table_lookup(st_table* table, st_data_t key)
 {
     prof_allocation_t* result = NULL;
     st_data_t value;
-    if (st_lookup(table, key, &value))
+    if (rb_st_lookup(table, key, &value))
     {
         result = (prof_allocation_t*)value;
     }
@@ -18,23 +17,20 @@ allocations_table_lookup(st_table *table, st_data_t key)
     return result;
 }
 
-void
-allocations_table_insert(st_table *table, st_data_t key, prof_allocation_t * allocation)
+void allocations_table_insert(st_table* table, st_data_t key, prof_allocation_t* allocation)
 {
-    st_insert(table, (st_data_t)key, (st_data_t)allocation);
+    rb_st_insert(table, (st_data_t)key, (st_data_t)allocation);
 }
 
-st_data_t
-allocations_key(VALUE klass, int source_line)
+st_data_t allocations_key(VALUE klass, int source_line)
 {
     return (klass << 4) + source_line;
 }
 
 /* ====== prof_allocation_t ====== */
-prof_allocation_t*
-prof_allocation_create(void)
+prof_allocation_t* prof_allocation_create(void)
 {
-    prof_allocation_t *result = ALLOC(prof_allocation_t);
+    prof_allocation_t* result = ALLOC(prof_allocation_t);
     result->count = 0;
     result->klass = Qnil;
     result->klass_name = Qnil;
@@ -47,8 +43,19 @@ prof_allocation_create(void)
     return result;
 }
 
-prof_allocation_t*
-prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg)
+prof_allocation_t* prof_get_allocation(VALUE self)
+{
+    /* Can't use Data_Get_Struct because that triggers the event hook
+       ending up in endless recursion. */
+    prof_allocation_t* result = RTYPEDDATA_DATA(self);
+
+    if (!result)
+        rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
+
+    return result;
+}
+
+prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg)
 {
     VALUE object = rb_tracearg_object(trace_arg);
     if (BUILTIN_TYPE(object) == T_IMEMO)
@@ -78,76 +85,85 @@ prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg)
     return allocation;
 }
 
-static void
-prof_allocation_ruby_gc_free(void *data)
+static void prof_allocation_ruby_gc_free(void* data)
 {
-    prof_allocation_t* allocation = (prof_allocation_t*)data;
+    if (data)
+    {
+        prof_allocation_t* allocation = (prof_allocation_t*)data;
+        allocation->object = Qnil;
+    }
+}
 
+void prof_allocation_free(prof_allocation_t* allocation)
+{
     /* Has this allocation object been accessed by Ruby? If
        yes clean it up so to avoid a segmentation fault. */
     if (allocation->object != Qnil)
     {
-        RDATA(allocation->object)->dmark = NULL;
-        RDATA(allocation->object)->dfree = NULL;
-        RDATA(allocation->object)->data = NULL;
+        RTYPEDDATA(allocation->object)->data = NULL;
         allocation->object = Qnil;
     }
-}
 
-void
-prof_allocation_free(prof_allocation_t* allocation)
-{
-    prof_allocation_ruby_gc_free(allocation);
     xfree(allocation);
 }
 
-size_t
-prof_allocation_size(const void* data)
+size_t prof_allocation_size(const void* data)
 {
     return sizeof(prof_allocation_t);
 }
 
-void
-prof_allocation_mark(void *data)
+void prof_allocation_mark(void* data)
 {
+    if (!data) return;
+
     prof_allocation_t* allocation = (prof_allocation_t*)data;
+    if (allocation->object != Qnil)
+        rb_gc_mark(allocation->object);
+
     if (allocation->klass != Qnil)
         rb_gc_mark(allocation->klass);
-
+
     if (allocation->klass_name != Qnil)
         rb_gc_mark(allocation->klass_name);
 
-    if (allocation->object != Qnil)
-        rb_gc_mark(allocation->object);
-
     if (allocation->source_file != Qnil)
         rb_gc_mark(allocation->source_file);
 }
 
-VALUE
-prof_allocation_wrap(prof_allocation_t *allocation)
+static const rb_data_type_t allocation_type =
+{
+    .wrap_struct_name = "Allocation",
+    .function =
+    {
+        .dmark = prof_allocation_mark,
+        .dfree = prof_allocation_ruby_gc_free,
+        .dsize = prof_allocation_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
+VALUE prof_allocation_wrap(prof_allocation_t* allocation)
 {
     if (allocation->object == Qnil)
     {
-        allocation->object = Data_Wrap_Struct(cRpAllocation, prof_allocation_mark , prof_allocation_ruby_gc_free, allocation);
+        allocation->object = TypedData_Wrap_Struct(cRpAllocation, &allocation_type, allocation);
     }
     return allocation->object;
 }
 
-static VALUE
-prof_allocation_allocate(VALUE klass)
+static VALUE prof_allocation_allocate(VALUE klass)
 {
     prof_allocation_t* allocation = prof_allocation_create();
     allocation->object = prof_allocation_wrap(allocation);
     return allocation->object;
 }
 
-prof_allocation_t*
-prof_allocation_get(VALUE self)
+prof_allocation_t* prof_allocation_get(VALUE self)
 {
     /* Can't use Data_Get_Struct because that triggers the event hook
        ending up in endless recursion. */
-    prof_allocation_t* result = DATA_PTR(self);
+    prof_allocation_t* result = RTYPEDDATA_DATA(self);
     if (!result)
         rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
 
@@ -158,8 +174,7 @@ prof_allocation_get(VALUE self)
    klass -> Class
 
 Returns the type of Class being allocated. */
-static VALUE
-prof_allocation_klass_name(VALUE self)
+static VALUE prof_allocation_klass_name(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
 
@@ -174,8 +189,7 @@ prof_allocation_klass_name(VALUE self)
 
 Returns the klass flags */
 
-static VALUE
-prof_allocation_klass_flags(VALUE self)
+static VALUE prof_allocation_klass_flags(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return INT2FIX(allocation->klass_flags);
@@ -185,8 +199,7 @@ prof_allocation_klass_flags(VALUE self)
    source_file -> string
 
 Returns the the line number where objects were allocated. */
-static VALUE
-prof_allocation_source_file(VALUE self)
+static VALUE prof_allocation_source_file(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return allocation->source_file;
@@ -196,8 +209,7 @@ prof_allocation_source_file(VALUE self)
    line -> number
 
 Returns the the line number where objects were allocated. */
-static VALUE
-prof_allocation_source_line(VALUE self)
+static VALUE prof_allocation_source_line(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return INT2FIX(allocation->source_line);
@@ -207,8 +219,7 @@ prof_allocation_source_line(VALUE self)
    count -> number
 
 Returns the number of times this class has been allocated. */
-static VALUE
-prof_allocation_count(VALUE self)
+static VALUE prof_allocation_count(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return INT2FIX(allocation->count);
@@ -218,18 +229,16 @@ prof_allocation_count(VALUE self)
    memory -> number
 
 Returns the amount of memory allocated. */
-static VALUE
-prof_allocation_memory(VALUE self)
+static VALUE prof_allocation_memory(VALUE self)
 {
     prof_allocation_t* allocation = prof_allocation_get(self);
     return ULL2NUM(allocation->memory);
 }
 
 /* :nodoc: */
-static VALUE
-prof_allocation_dump(VALUE self)
+static VALUE prof_allocation_dump(VALUE self)
 {
-    prof_allocation_t* allocation = DATA_PTR(self);
+    prof_allocation_t* allocation = prof_get_allocation(self);
 
     VALUE result = rb_hash_new();
 
@@ -245,10 +254,9 @@ prof_allocation_dump(VALUE self)
 }
 
 /* :nodoc: */
-static VALUE
-prof_allocation_load(VALUE self, VALUE data)
+static VALUE prof_allocation_load(VALUE self, VALUE data)
 {
-    prof_allocation_t* allocation = DATA_PTR(self);
+    prof_allocation_t* allocation = prof_get_allocation(self);
     allocation->object = self;
 
     allocation->key = FIX2LONG(rb_hash_aref(data, ID2SYM(rb_intern("key"))));
@@ -264,7 +272,7 @@ prof_allocation_load(VALUE self, VALUE data)
 
 void rp_init_allocation(void)
 {
-    cRpAllocation = rb_define_class_under(mProf, "Allocation", rb_cData);
+    cRpAllocation = rb_define_class_under(mProf, "Allocation", rb_cObject);
     rb_undef_method(CLASS_OF(cRpAllocation), "new");
     rb_define_alloc_func(cRpAllocation, prof_allocation_allocate);
 
data/ext/ruby_prof/rp_allocation.h
@@ -7,7 +7,7 @@
 #include "ruby_prof.h"
 #include "rp_method.h"
 
-typedef struct
+typedef struct prof_allocation_t
 {
     st_data_t key;              /* Key in hash table */
     unsigned int klass_flags;   /* Information about the type of class */
@@ -22,10 +22,10 @@ typedef struct
 
 void rp_init_allocation(void);
 void prof_allocation_free(prof_allocation_t* allocation);
-void prof_allocation_mark(void *data);
+void prof_allocation_mark(void* data);
 VALUE prof_allocation_wrap(prof_allocation_t* allocation);
 prof_allocation_t* prof_allocation_get(VALUE self);
-prof_allocation_t* prof_allocate_increment(prof_method_t *method, rb_trace_arg_t *trace_arg);
+prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg);
 
 
 #endif //_RP_ALLOCATION_
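Two recurring themes in the rp_allocation hunks are worth calling out. First, bare st_insert/st_lookup table calls move to Ruby's exported rb_st_* aliases. Second, teardown is split in two: the GC-side dfree only detaches the Ruby wrapper, because the owning profile still holds the struct in its tables, while prof_allocation_free, invoked when the profile itself is freed, orphans any live wrapper before releasing memory. A minimal sketch of that ownership split, using hypothetical names (record_t, record_gc_free, record_owner_free) rather than the gem's:

    #include <ruby.h>

    typedef struct { VALUE object; } record_t;  /* object: wrapper VALUE or Qnil */

    /* dfree: runs when the GC collects the wrapper. The owner still holds
       the struct, so only break the wrapper link. */
    static void record_gc_free(void* data)
    {
        if (data)
            ((record_t*)data)->object = Qnil;
    }

    /* Owner-side free: if a wrapper is still alive, NULL its data pointer so
       a later accessor raises instead of touching freed memory, then free. */
    static void record_owner_free(record_t* record)
    {
        if (record->object != Qnil)
        {
            RTYPEDDATA(record->object)->data = NULL;
            record->object = Qnil;
        }
        xfree(record);
    }

With this split, a Ruby-side Allocation object that outlives its profile raises the "already been freed" error shown above instead of crashing the process.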
data/ext/ruby_prof/rp_call_tree.c
@@ -0,0 +1,369 @@
+/* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+   Please see the LICENSE file for copyright and distribution information */
+
+#include "rp_call_tree.h"
+
+#define INITIAL_CALL_TREES_SIZE 2
+
+VALUE cRpCallTree;
+
+/* ======= prof_call_tree_t ========*/
+prof_call_tree_t* prof_call_tree_create(prof_method_t* method, prof_call_tree_t* parent, VALUE source_file, int source_line)
+{
+    prof_call_tree_t* result = ALLOC(prof_call_tree_t);
+    result->method = method;
+    result->parent = parent;
+    result->object = Qnil;
+    result->visits = 0;
+    result->source_line = source_line;
+    result->source_file = source_file;
+    result->children = rb_st_init_numtable();
+    result->measurement = prof_measurement_create();
+
+    return result;
+}
+
+prof_call_tree_t* prof_call_tree_copy(prof_call_tree_t* other)
+{
+    prof_call_tree_t* result = ALLOC(prof_call_tree_t);
+    result->children = rb_st_init_numtable();
+    result->object = Qnil;
+    result->visits = 0;
+
+    result->method = other->method;
+    result->parent = other->parent;
+    result->source_line = other->source_line;
+    result->source_file = other->source_file;
+
+    result->measurement = prof_measurement_create();
+    result->measurement->called = other->measurement->called;
+    result->measurement->total_time = other->measurement->total_time;
+    result->measurement->self_time = other->measurement->self_time;
+    result->measurement->wait_time = other->measurement->wait_time;
+    result->measurement->object = Qnil;
+
+    return result;
+}
+
+void prof_call_tree_merge(prof_call_tree_t* result, prof_call_tree_t* other)
+{
+    result->measurement->called += other->measurement->called;
+    result->measurement->total_time += other->measurement->total_time;
+    result->measurement->self_time += other->measurement->self_time;
+    result->measurement->wait_time += other->measurement->wait_time;
+}
+
+static int prof_call_tree_collect_children(st_data_t key, st_data_t value, st_data_t result)
+{
+    prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
+    VALUE arr = (VALUE)result;
+    rb_ary_push(arr, prof_call_tree_wrap(call_tree));
+    return ST_CONTINUE;
+}
+
+static int prof_call_tree_mark_children(st_data_t key, st_data_t value, st_data_t data)
+{
+    prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
+    rb_st_foreach(call_tree->children, prof_call_tree_mark_children, data);
+    prof_call_tree_mark(call_tree);
+    return ST_CONTINUE;
+}
+
+void prof_call_tree_mark(void* data)
+{
+    if (!data)
+        return;
+
+    prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+
+    if (call_tree->object != Qnil)
+        rb_gc_mark(call_tree->object);
+
+    if (call_tree->source_file != Qnil)
+        rb_gc_mark(call_tree->source_file);
+
+    prof_method_mark(call_tree->method);
+    prof_measurement_mark(call_tree->measurement);
+
+    // Recurse down through the whole call tree but only from the top node
+    // to avoid calling mark over and over and over.
+    if (!call_tree->parent)
+        rb_st_foreach(call_tree->children, prof_call_tree_mark_children, 0);
+}
+
+static void prof_call_tree_ruby_gc_free(void* data)
+{
+    if (data)
+    {
+        prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+        call_tree->object = Qnil;
+    }
+}
+
+static int prof_call_tree_free_children(st_data_t key, st_data_t value, st_data_t data)
+{
+    prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
+    prof_call_tree_free(call_tree);
+    return ST_CONTINUE;
+}
+
+void prof_call_tree_free(prof_call_tree_t* call_tree_data)
+{
+    /* Has this call info object been accessed by Ruby? If
+       yes clean it up so to avoid a segmentation fault. */
+    if (call_tree_data->object != Qnil)
+    {
+        RTYPEDDATA(call_tree_data->object)->data = NULL;
+        call_tree_data->object = Qnil;
+    }
+
+    // Free children
+    rb_st_foreach(call_tree_data->children, prof_call_tree_free_children, 0);
+    rb_st_free_table(call_tree_data->children);
+
+    // Free measurement
+    prof_measurement_free(call_tree_data->measurement);
+
+    // Finally free self
+    xfree(call_tree_data);
+}
+
+size_t prof_call_tree_size(const void* data)
+{
+    return sizeof(prof_call_tree_t);
+}
+
+static const rb_data_type_t call_tree_type =
+{
+    .wrap_struct_name = "CallTree",
+    .function =
+    {
+        .dmark = prof_call_tree_mark,
+        .dfree = prof_call_tree_ruby_gc_free,
+        .dsize = prof_call_tree_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
+VALUE prof_call_tree_wrap(prof_call_tree_t* call_tree)
+{
+    if (call_tree->object == Qnil)
+    {
+        call_tree->object = TypedData_Wrap_Struct(cRpCallTree, &call_tree_type, call_tree);
+    }
+    return call_tree->object;
+}
+
+static VALUE prof_call_tree_allocate(VALUE klass)
+{
+    prof_call_tree_t* call_tree = prof_call_tree_create(NULL, NULL, Qnil, 0);
+    call_tree->object = prof_call_tree_wrap(call_tree);
+    return call_tree->object;
+}
+
+prof_call_tree_t* prof_get_call_tree(VALUE self)
+{
+    /* Can't use Data_Get_Struct because that triggers the event hook
+       ending up in endless recursion. */
+    prof_call_tree_t* result = RTYPEDDATA_DATA(self);
+
+    if (!result)
+        rb_raise(rb_eRuntimeError, "This RubyProf::CallTree instance has already been freed, likely because its profile has been freed.");
+
+    return result;
+}
+
+/* ======= Call Tree Table ========*/
+static size_t call_tree_table_insert(st_table* table, st_data_t key, prof_call_tree_t* val)
+{
+    return rb_st_insert(table, (st_data_t)key, (st_data_t)val);
+}
+
+prof_call_tree_t* call_tree_table_lookup(st_table* table, st_data_t key)
+{
+    st_data_t val;
+    if (rb_st_lookup(table, (st_data_t)key, &val))
+    {
+        return (prof_call_tree_t*)val;
+    }
+    else
+    {
+        return NULL;
+    }
+}
+
+uint32_t prof_call_figure_depth(prof_call_tree_t* call_tree_data)
+{
+    uint32_t result = 0;
+
+    while (call_tree_data->parent)
+    {
+        result++;
+        call_tree_data = call_tree_data->parent;
+    }
+
+    return result;
+}
+
+void prof_call_tree_add_parent(prof_call_tree_t* self, prof_call_tree_t* parent)
+{
+    prof_call_tree_add_child(parent, self);
+    self->parent = parent;
+}
+
+void prof_call_tree_add_child(prof_call_tree_t* self, prof_call_tree_t* child)
+{
+    call_tree_table_insert(self->children, child->method->key, child);
+}
+
+/* ======= RubyProf::CallTree ========*/
+
+/* call-seq:
+   parent -> call_tree
+
+Returns the CallTree parent call_tree object (the method that called this method).*/
+static VALUE prof_call_tree_parent(VALUE self)
+{
+    prof_call_tree_t* call_tree = prof_get_call_tree(self);
+    if (call_tree->parent)
+        return prof_call_tree_wrap(call_tree->parent);
+    else
+        return Qnil;
+}
+
+/* call-seq:
+   callees -> array
+
+Returns an array of call info objects that this method called (ie, children).*/
+static VALUE prof_call_tree_children(VALUE self)
+{
+    prof_call_tree_t* call_tree = prof_get_call_tree(self);
+    VALUE result = rb_ary_new();
+    rb_st_foreach(call_tree->children, prof_call_tree_collect_children, result);
+    return result;
+}
+
+/* call-seq:
+   called -> MethodInfo
+
+Returns the target method. */
+static VALUE prof_call_tree_target(VALUE self)
+{
+    prof_call_tree_t* call_tree = prof_get_call_tree(self);
+    return prof_method_wrap(call_tree->method);
+}
+
+/* call-seq:
+   called -> Measurement
+
+Returns the measurement associated with this call_tree. */
+static VALUE prof_call_tree_measurement(VALUE self)
+{
+    prof_call_tree_t* call_tree = prof_get_call_tree(self);
+    return prof_measurement_wrap(call_tree->measurement);
+}
+
+/* call-seq:
+   depth -> int
+
+returns the depth of this call info in the call graph */
+static VALUE prof_call_tree_depth(VALUE self)
+{
+    prof_call_tree_t* call_tree_data = prof_get_call_tree(self);
+    uint32_t depth = prof_call_figure_depth(call_tree_data);
+    return rb_int_new(depth);
+}
+
+/* call-seq:
+   source_file => string
+
+return the source file of the method
+*/
+static VALUE prof_call_tree_source_file(VALUE self)
+{
+    prof_call_tree_t* result = prof_get_call_tree(self);
+    return result->source_file;
+}
+
+/* call-seq:
+   line_no -> int
+
+returns the line number of the method */
+static VALUE prof_call_tree_line(VALUE self)
+{
+    prof_call_tree_t* result = prof_get_call_tree(self);
+    return INT2FIX(result->source_line);
+}
+
+/* :nodoc: */
+static VALUE prof_call_tree_dump(VALUE self)
+{
+    prof_call_tree_t* call_tree_data = prof_get_call_tree(self);
+    VALUE result = rb_hash_new();
+
+    rb_hash_aset(result, ID2SYM(rb_intern("measurement")), prof_measurement_wrap(call_tree_data->measurement));
+
+    rb_hash_aset(result, ID2SYM(rb_intern("source_file")), call_tree_data->source_file);
+    rb_hash_aset(result, ID2SYM(rb_intern("source_line")), INT2FIX(call_tree_data->source_line));
+
+    rb_hash_aset(result, ID2SYM(rb_intern("parent")), prof_call_tree_parent(self));
+    rb_hash_aset(result, ID2SYM(rb_intern("children")), prof_call_tree_children(self));
+    rb_hash_aset(result, ID2SYM(rb_intern("target")), prof_call_tree_target(self));
+
+    return result;
+}
+
+/* :nodoc: */
+static VALUE prof_call_tree_load(VALUE self, VALUE data)
+{
+    VALUE target = Qnil;
+    VALUE parent = Qnil;
+    prof_call_tree_t* call_tree = prof_get_call_tree(self);
+    call_tree->object = self;
+
+    VALUE measurement = rb_hash_aref(data, ID2SYM(rb_intern("measurement")));
+    call_tree->measurement = prof_get_measurement(measurement);
+
+    call_tree->source_file = rb_hash_aref(data, ID2SYM(rb_intern("source_file")));
+    call_tree->source_line = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("source_line"))));
+
+    parent = rb_hash_aref(data, ID2SYM(rb_intern("parent")));
+    if (parent != Qnil)
+        call_tree->parent = prof_get_call_tree(parent);
+
+    VALUE callees = rb_hash_aref(data, ID2SYM(rb_intern("children")));
+    for (int i = 0; i < rb_array_len(callees); i++)
+    {
+        VALUE call_tree_object = rb_ary_entry(callees, i);
+        prof_call_tree_t* call_tree_data = prof_get_call_tree(call_tree_object);
+
+        st_data_t key = call_tree_data->method ? call_tree_data->method->key : method_key(Qnil, 0);
+        call_tree_table_insert(call_tree->children, key, call_tree_data);
+    }
+
+    target = rb_hash_aref(data, ID2SYM(rb_intern("target")));
+    call_tree->method = prof_get_method(target);
+
+    return data;
+}
+
+void rp_init_call_tree()
+{
+    /* CallTree */
+    cRpCallTree = rb_define_class_under(mProf, "CallTree", rb_cObject);
+    rb_undef_method(CLASS_OF(cRpCallTree), "new");
+    rb_define_alloc_func(cRpCallTree, prof_call_tree_allocate);
+
+    rb_define_method(cRpCallTree, "parent", prof_call_tree_parent, 0);
+    rb_define_method(cRpCallTree, "children", prof_call_tree_children, 0);
+    rb_define_method(cRpCallTree, "target", prof_call_tree_target, 0);
+    rb_define_method(cRpCallTree, "measurement", prof_call_tree_measurement, 0);
+
+    rb_define_method(cRpCallTree, "depth", prof_call_tree_depth, 0);
+    rb_define_method(cRpCallTree, "source_file", prof_call_tree_source_file, 0);
+    rb_define_method(cRpCallTree, "line", prof_call_tree_line, 0);
+
+    rb_define_method(cRpCallTree, "_dump_data", prof_call_tree_dump, 0);
+    rb_define_method(cRpCallTree, "_load_data", prof_call_tree_load, 1);
+}
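To see how the functions in this new file compose, here is a hypothetical snippet (not part of the gem) that builds a two-node tree; parent_method and child_method stand in for existing prof_method_t records:

    #include "rp_call_tree.h"

    static void call_tree_demo(prof_method_t* parent_method, prof_method_t* child_method)
    {
        prof_call_tree_t* root = prof_call_tree_create(parent_method, NULL, Qnil, 0);
        prof_call_tree_t* child = prof_call_tree_create(child_method, NULL, Qnil, 0);

        /* Registers child in root->children (keyed by its method) and sets
           child->parent. */
        prof_call_tree_add_parent(child, root);

        /* Walks parent pointers: one hop from child back to the root. */
        uint32_t depth = prof_call_figure_depth(child);
        (void)depth;  /* depth == 1 */

        /* Frees the whole tree; children are freed recursively. */
        prof_call_tree_free(root);
    }

Note that prof_call_tree_free is the owner-side teardown: as with allocations, the GC-side dfree only detaches the Ruby wrapper, so freeing must come from whoever owns the tree (normally the profile).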