ruby-prof 1.1.0 → 1.2.0

Files changed (74)
  1. checksums.yaml +4 -4
  2. data/CHANGES +12 -1
  3. data/bin/ruby-prof +100 -152
  4. data/ext/ruby_prof/rp_aggregate_call_tree.c +41 -0
  5. data/ext/ruby_prof/rp_aggregate_call_tree.h +13 -0
  6. data/ext/ruby_prof/rp_allocation.c +31 -51
  7. data/ext/ruby_prof/rp_allocation.h +2 -2
  8. data/ext/ruby_prof/rp_call_tree.c +353 -0
  9. data/ext/ruby_prof/rp_call_tree.h +43 -0
  10. data/ext/ruby_prof/rp_call_trees.c +266 -0
  11. data/ext/ruby_prof/rp_call_trees.h +29 -0
  12. data/ext/ruby_prof/rp_measure_allocations.c +11 -13
  13. data/ext/ruby_prof/rp_measure_process_time.c +11 -13
  14. data/ext/ruby_prof/rp_measure_wall_time.c +17 -15
  15. data/ext/ruby_prof/rp_measurement.c +27 -36
  16. data/ext/ruby_prof/rp_measurement.h +6 -6
  17. data/ext/ruby_prof/rp_method.c +88 -248
  18. data/ext/ruby_prof/rp_method.h +12 -19
  19. data/ext/ruby_prof/rp_profile.c +277 -270
  20. data/ext/ruby_prof/rp_profile.h +0 -1
  21. data/ext/ruby_prof/rp_stack.c +113 -105
  22. data/ext/ruby_prof/rp_stack.h +15 -18
  23. data/ext/ruby_prof/rp_thread.c +115 -107
  24. data/ext/ruby_prof/rp_thread.h +9 -8
  25. data/ext/ruby_prof/ruby_prof.c +27 -23
  26. data/ext/ruby_prof/ruby_prof.h +9 -0
  27. data/ext/ruby_prof/vc/ruby_prof.vcxproj +11 -7
  28. data/lib/ruby-prof.rb +2 -3
  29. data/lib/ruby-prof/assets/call_stack_printer.html.erb +4 -7
  30. data/lib/ruby-prof/assets/graph_printer.html.erb +5 -6
  31. data/lib/ruby-prof/{call_info.rb → call_tree.rb} +6 -6
  32. data/lib/ruby-prof/call_tree_visitor.rb +36 -0
  33. data/lib/ruby-prof/measurement.rb +5 -2
  34. data/lib/ruby-prof/method_info.rb +3 -15
  35. data/lib/ruby-prof/printers/call_info_printer.rb +12 -10
  36. data/lib/ruby-prof/printers/call_stack_printer.rb +19 -22
  37. data/lib/ruby-prof/printers/call_tree_printer.rb +1 -1
  38. data/lib/ruby-prof/printers/dot_printer.rb +3 -3
  39. data/lib/ruby-prof/printers/graph_printer.rb +3 -4
  40. data/lib/ruby-prof/printers/multi_printer.rb +2 -2
  41. data/lib/ruby-prof/rack.rb +3 -0
  42. data/lib/ruby-prof/thread.rb +3 -18
  43. data/lib/ruby-prof/version.rb +1 -1
  44. data/ruby-prof.gemspec +7 -0
  45. data/test/alias_test.rb +42 -45
  46. data/test/basic_test.rb +0 -86
  47. data/test/{call_info_visitor_test.rb → call_tree_visitor_test.rb} +6 -5
  48. data/test/call_trees_test.rb +66 -0
  49. data/test/exclude_methods_test.rb +17 -12
  50. data/test/fiber_test.rb +203 -6
  51. data/test/gc_test.rb +32 -23
  52. data/test/inverse_call_tree_test.rb +175 -0
  53. data/test/line_number_test.rb +64 -67
  54. data/test/marshal_test.rb +7 -11
  55. data/test/measure_allocations_test.rb +224 -234
  56. data/test/measure_allocations_trace_test.rb +224 -234
  57. data/test/measure_memory_trace_test.rb +814 -469
  58. data/test/measure_process_time_test.rb +0 -64
  59. data/test/measure_times.rb +2 -0
  60. data/test/measure_wall_time_test.rb +34 -58
  61. data/test/pause_resume_test.rb +19 -10
  62. data/test/prime.rb +1 -3
  63. data/test/prime_script.rb +6 -0
  64. data/test/printers_test.rb +1 -1
  65. data/test/recursive_test.rb +50 -54
  66. data/test/start_stop_test.rb +19 -19
  67. data/test/test_helper.rb +3 -15
  68. data/test/thread_test.rb +11 -11
  69. data/test/unique_call_path_test.rb +25 -95
  70. metadata +19 -9
  71. data/ext/ruby_prof/rp_call_info.c +0 -271
  72. data/ext/ruby_prof/rp_call_info.h +0 -35
  73. data/lib/ruby-prof/call_info_visitor.rb +0 -38
  74. data/test/parser_timings.rb +0 -24

data/ext/ruby_prof/rp_call_trees.h
@@ -0,0 +1,29 @@
+ /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+ Please see the LICENSE file for copyright and distribution information */
+
+ #ifndef __RP_CALL_TREES_H__
+ #define __RP_CALL_TREES_H__
+
+ #include "ruby_prof.h"
+ #include "rp_call_tree.h"
+
+ /* Array of call_tree objects */
+ typedef struct prof_call_trees_t
+ {
+ prof_call_tree_t** start;
+ prof_call_tree_t** end;
+ prof_call_tree_t** ptr;
+
+ VALUE object;
+ } prof_call_trees_t;
+
+
+ void rp_init_call_trees();
+ prof_call_trees_t* prof_call_trees_create();
+ void prof_call_trees_mark(prof_call_trees_t* call_trees);
+ void prof_call_trees_free(prof_call_trees_t* call_trees);
+ prof_call_trees_t* prof_get_call_trees(VALUE self);
+ void prof_add_call_tree(prof_call_trees_t* call_trees, prof_call_tree_t* call_tree);
+ VALUE prof_call_trees_wrap(prof_call_trees_t* call_trees);
+
+ #endif //__RP_CALL_TREES_H__
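
The start/end/ptr triple above is the usual growable-buffer idiom: start points at the allocated array, ptr at the next free slot, and end one past the capacity. As a point of reference only, here is a hedged sketch (not code from the gem; the doubling policy and initial capacity are assumptions) of how prof_add_call_tree could append under that layout:

/* Sketch only: assumes start/ptr/end follow the growable-array idiom above and
   that prof_call_trees_create allocated an initial buffer. The real
   prof_add_call_tree lives in rp_call_trees.c and may differ. */
void prof_add_call_tree_sketch(prof_call_trees_t* call_trees, prof_call_tree_t* call_tree)
{
    if (call_trees->ptr == call_trees->end)
    {
        size_t len = call_trees->ptr - call_trees->start;   /* slots in use */
        size_t capacity = (len == 0) ? 8 : len * 2;         /* assumed growth policy */
        REALLOC_N(call_trees->start, prof_call_tree_t*, capacity);
        call_trees->ptr = call_trees->start + len;
        call_trees->end = call_trees->start + capacity;
    }
    *call_trees->ptr++ = call_tree;                         /* append */
}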

data/ext/ruby_prof/rp_measure_allocations.c
@@ -8,14 +8,12 @@
  static VALUE cMeasureAllocations;
  VALUE total_allocated_objects_key;

- static double
- measure_allocations_via_gc_stats(rb_trace_arg_t* trace_arg)
+ static double measure_allocations_via_gc_stats(rb_trace_arg_t* trace_arg)
  {
  return rb_gc_stat(total_allocated_objects_key);
  }

- static double
- measure_allocations_via_tracing(rb_trace_arg_t* trace_arg)
+ static double measure_allocations_via_tracing(rb_trace_arg_t* trace_arg)
  {
  static double result = 0;

@@ -30,17 +28,17 @@ measure_allocations_via_tracing(rb_trace_arg_t* trace_arg)

  prof_measurer_t* prof_measurer_allocations(bool track_allocations)
  {
- prof_measurer_t* measure = ALLOC(prof_measurer_t);
- measure->mode = MEASURE_ALLOCATIONS;
- measure->multiplier = 1;
- measure->track_allocations = track_allocations;
+ prof_measurer_t* measure = ALLOC(prof_measurer_t);
+ measure->mode = MEASURE_ALLOCATIONS;
+ measure->multiplier = 1;
+ measure->track_allocations = track_allocations;

- if (track_allocations)
- measure->measure = measure_allocations_via_tracing;
- else
- measure->measure = measure_allocations_via_gc_stats;
+ if (track_allocations)
+ measure->measure = measure_allocations_via_tracing;
+ else
+ measure->measure = measure_allocations_via_gc_stats;

- return measure;
+ return measure;
  }

  void rp_init_measure_allocations()

data/ext/ruby_prof/rp_measure_process_time.c
@@ -6,8 +6,7 @@

  static VALUE cMeasureProcessTime;

- static double
- measure_process_time(rb_trace_arg_t* trace_arg)
+ static double measure_process_time(rb_trace_arg_t* trace_arg)
  {
  #if defined(_WIN32)
  FILETIME createTime;
@@ -29,16 +28,15 @@ measure_process_time(rb_trace_arg_t* trace_arg)
  #elif !defined(CLOCK_PROCESS_CPUTIME_ID)
  struct rusage usage;
  getrusage(RUSAGE_SELF, &usage);
- return usage.ru_stime.tv_sec + usage.ru_utime.tv_sec + ((usage.ru_stime.tv_usec + usage.ru_utime.tv_usec)/1000000.0);
+ return usage.ru_stime.tv_sec + usage.ru_utime.tv_sec + ((usage.ru_stime.tv_usec + usage.ru_utime.tv_usec) / 1000000.0);
  #else
  struct timespec clock;
- clock_gettime(CLOCK_PROCESS_CPUTIME_ID , &clock);
- return clock.tv_sec + (clock.tv_nsec/1000000000.0);
+ clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &clock);
+ return clock.tv_sec + (clock.tv_nsec / 1000000000.0);
  #endif
  }

- static double
- multiplier_process_time(void)
+ static double multiplier_process_time(void)
  {
  #if defined(_WIN32)
  // Times are in 100-nanosecond time units. So instead of 10-9 use 10-7
@@ -50,12 +48,12 @@ multiplier_process_time(void)

  prof_measurer_t* prof_measurer_process_time(bool track_allocations)
  {
- prof_measurer_t* measure = ALLOC(prof_measurer_t);
- measure->mode = MEASURE_PROCESS_TIME;
- measure->measure = measure_process_time;
- measure->multiplier = multiplier_process_time();
- measure->track_allocations = track_allocations;
- return measure;
+ prof_measurer_t* measure = ALLOC(prof_measurer_t);
+ measure->mode = MEASURE_PROCESS_TIME;
+ measure->measure = measure_process_time;
+ measure->multiplier = multiplier_process_time();
+ measure->track_allocations = track_allocations;
+ return measure;
  }

  void rp_init_measure_process_time()

data/ext/ruby_prof/rp_measure_wall_time.c
@@ -1,7 +1,7 @@
  /* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
  Please see the LICENSE file for copyright and distribution information */

- /* :nodoc: */
+ /* :nodoc: */
  #include "rp_measurement.h"

  #if defined(__APPLE__)
@@ -12,11 +12,12 @@

  static VALUE cMeasureWallTime;

- static double
- measure_wall_time(rb_trace_arg_t* trace_arg)
+ static double measure_wall_time(rb_trace_arg_t* trace_arg)
  {
  #if defined(_WIN32)
- return GetTickCount();
+ LARGE_INTEGER time;
+ QueryPerformanceCounter(&time);
+ return time.QuadPart;
  #elif defined(__APPLE__)
  return mach_absolute_time();// * (uint64_t)mach_timebase.numer / (uint64_t)mach_timebase.denom;
  #elif defined(__linux__)
@@ -26,18 +27,19 @@ measure_wall_time(rb_trace_arg_t* trace_arg)
  #else
  struct timeval tv;
  gettimeofday(&tv, NULL);
- return tv.tv_sec + (tv.tv_usec / 1000000.0);
+ return tv.tv_sec + (tv.tv_usec / 1000000.0);
  #endif
  }

- static double
- multiplier_wall_time(void)
+ static double multiplier_wall_time(void)
  {
  #if defined(_WIN32)
- return 1.0/1000.0;
+ LARGE_INTEGER frequency;
+ QueryPerformanceFrequency(&frequency);
+ return 1.0 / frequency.QuadPart;
  #elif defined(__APPLE__)
  mach_timebase_info_data_t mach_timebase;
- mach_timebase_info (&mach_timebase);
+ mach_timebase_info(&mach_timebase);
  return (uint64_t)mach_timebase.numer / (uint64_t)mach_timebase.denom / 1000000000.0;
  #else
  return 1.0;
@@ -46,12 +48,12 @@ multiplier_wall_time(void)

  prof_measurer_t* prof_measurer_wall_time(bool track_allocations)
  {
- prof_measurer_t* measure = ALLOC(prof_measurer_t);
- measure->mode = MEASURE_WALL_TIME;
- measure->measure = measure_wall_time;
- measure->multiplier = multiplier_wall_time();
- measure->track_allocations = track_allocations;
- return measure;
+ prof_measurer_t* measure = ALLOC(prof_measurer_t);
+ measure->mode = MEASURE_WALL_TIME;
+ measure->measure = measure_wall_time;
+ measure->multiplier = multiplier_wall_time();
+ measure->track_allocations = track_allocations;
+ return measure;
  }

  void rp_init_measure_wall_time()
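
On Windows the wall-time source moves from GetTickCount, which ticks in milliseconds and wraps after roughly 49.7 days, to QueryPerformanceCounter, whose raw ticks only become seconds after scaling by the reciprocal of QueryPerformanceFrequency, which is exactly what the new multiplier returns. A self-contained sketch of that tick-to-seconds conversion (illustration only, not gem code):

#include <stdio.h>
#include <windows.h>

/* Sketch: convert QueryPerformanceCounter ticks to seconds the same way the
   profiler does, i.e. raw ticks multiplied by 1.0 / frequency. */
int main(void)
{
    LARGE_INTEGER frequency, start, stop;
    QueryPerformanceFrequency(&frequency);   /* ticks per second, fixed at boot */

    QueryPerformanceCounter(&start);
    Sleep(250);                              /* stand-in for the code being profiled */
    QueryPerformanceCounter(&stop);

    double multiplier = 1.0 / (double)frequency.QuadPart;
    double elapsed = (stop.QuadPart - start.QuadPart) * multiplier;
    printf("elapsed: %.6f seconds\n", elapsed);
    return 0;
}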

data/ext/ruby_prof/rp_measurement.c
@@ -40,7 +40,7 @@ double prof_measure(prof_measurer_t* measurer, rb_trace_arg_t* trace_arg)
  }

  /* ======= prof_measurement_t ========*/
- prof_measurement_t *prof_measurement_create(void)
+ prof_measurement_t* prof_measurement_create(void)
  {
  prof_measurement_t* result = ALLOC(prof_measurement_t);
  result->total_time = 0;
@@ -51,11 +51,23 @@ prof_measurement_t *prof_measurement_create(void)
  return result;
  }

- static void
- prof_measurement_ruby_gc_free(void *data)
+ void prof_measurement_mark(void* data)
  {
+ prof_measurement_t* measurement_data = (prof_measurement_t*)data;
+
+ if (measurement_data->object != Qnil)
+ rb_gc_mark(measurement_data->object);
+ }
+
+ static void prof_measurement_ruby_gc_free(void* data)
+ {
+ // Measurements are freed by their owning object (call info or method)
  prof_measurement_t* measurement = (prof_measurement_t*)data;
+ measurement->object = Qnil;
+ }

+ void prof_measurement_free(prof_measurement_t* measurement)
+ {
  /* Has this measurement object been accessed by Ruby? If
  yes clean it up so to avoid a segmentation fault. */
  if (measurement->object != Qnil)
@@ -65,49 +77,32 @@ prof_measurement_ruby_gc_free(void *data)
  RDATA(measurement->object)->data = NULL;
  measurement->object = Qnil;
  }
- }

- void
- prof_measurement_free(prof_measurement_t* measurement)
- {
- prof_measurement_ruby_gc_free(measurement);
  xfree(measurement);
  }

- size_t
- prof_measurement_size(const void *data)
+ size_t prof_measurement_size(const void* data)
  {
  return sizeof(prof_measurement_t);
  }

- void
- prof_measurement_mark(void *data)
- {
- prof_measurement_t* measurement = (prof_measurement_t*)data;
- if (measurement->object != Qnil)
- rb_gc_mark(measurement->object);
- }
-
- VALUE
- prof_measurement_wrap(prof_measurement_t* measurement)
+ VALUE prof_measurement_wrap(prof_measurement_t* measurement)
  {
  if (measurement->object == Qnil)
  {
- measurement->object = Data_Wrap_Struct(cRpMeasurement, prof_measurement_mark, prof_measurement_ruby_gc_free, measurement);
+ measurement->object = Data_Wrap_Struct(cRpMeasurement, NULL, prof_measurement_ruby_gc_free, measurement);
  }
  return measurement->object;
  }

- static VALUE
- prof_measurement_allocate(VALUE klass)
+ static VALUE prof_measurement_allocate(VALUE klass)
  {
- prof_measurement_t *measurement = prof_measurement_create();
+ prof_measurement_t* measurement = prof_measurement_create();
  measurement->object = prof_measurement_wrap(measurement);
  return measurement->object;
  }

- prof_measurement_t*
- prof_get_measurement(VALUE self)
+ prof_measurement_t* prof_get_measurement(VALUE self)
  {
  /* Can't use Data_Get_Struct because that triggers the event hook
  ending up in endless recursion. */
@@ -123,8 +118,7 @@ prof_get_measurement(VALUE self)
  total_time -> float

  Returns the total amount of time spent in this method and its children. */
- static VALUE
- prof_measurement_total_time(VALUE self)
+ static VALUE prof_measurement_total_time(VALUE self)
  {
  prof_measurement_t* result = prof_get_measurement(self);
  return rb_float_new(result->total_time);
@@ -146,8 +140,7 @@ prof_measurement_self_time(VALUE self)
  wait_time -> float

  Returns the total amount of time this method waited for other threads. */
- static VALUE
- prof_measurement_wait_time(VALUE self)
+ static VALUE prof_measurement_wait_time(VALUE self)
  {
  prof_measurement_t* result = prof_get_measurement(self);

@@ -158,10 +151,9 @@ prof_measurement_wait_time(VALUE self)
  called -> int

  Returns the total amount of times this method was called. */
- static VALUE
- prof_measurement_called(VALUE self)
+ static VALUE prof_measurement_called(VALUE self)
  {
- prof_measurement_t *result = prof_get_measurement(self);
+ prof_measurement_t* result = prof_get_measurement(self);
  return INT2NUM(result->called);
  }

@@ -169,10 +161,9 @@ prof_measurement_called(VALUE self)
  called=n -> n

  Sets the call count to n. */
- static VALUE
- prof_measurement_set_called(VALUE self, VALUE called)
+ static VALUE prof_measurement_set_called(VALUE self, VALUE called)
  {
- prof_measurement_t *result = prof_get_measurement(self);
+ prof_measurement_t* result = prof_get_measurement(self);
  result->called = NUM2INT(called);
  return called;
  }
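
The notable behavioral change here is ownership: the Ruby wrapper's GC callbacks no longer free or mark the underlying prof_measurement_t (Data_Wrap_Struct now passes NULL for the mark function, and the free callback only nils the back-reference), so the owning method or call tree is expected to call prof_measurement_free and prof_measurement_mark itself. For orientation, a hedged sketch of how a measurer is obtained and read through prof_get_measurer / prof_measure (declared in rp_measurement.h, next file); the helper name, the NULL trace_arg, and the assumption that prof_measure already applies the multiplier are mine, not the gem's:

#include "rp_measurement.h"

/* Sketch only: reads the current wall-time value through the measurer API. */
static double sample_wall_time_seconds(void)
{
    /* Build a wall-time measurer; allocation tracking is irrelevant here. */
    prof_measurer_t* measurer = prof_get_measurer(MEASURE_WALL_TIME, false);

    /* Assumption: prof_measure scales the raw reading by measurer->multiplier,
       and the wall-time reader ignores trace_arg, so NULL is acceptable. */
    return prof_measure(measurer, NULL);
}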

data/ext/ruby_prof/rp_measurement.h
@@ -8,7 +8,7 @@

  extern VALUE mMeasure;

- typedef double (*get_measurement)(rb_trace_arg_t *trace_arg);
+ typedef double (*get_measurement)(rb_trace_arg_t* trace_arg);

  typedef enum
  {
@@ -36,14 +36,14 @@ typedef struct prof_measurement_t
  VALUE object;
  } prof_measurement_t;

- prof_measurer_t *prof_get_measurer(prof_measure_mode_t measure, bool track_allocations);
- double prof_measure(prof_measurer_t *measurer, rb_trace_arg_t* trace_arg);
+ prof_measurer_t* prof_get_measurer(prof_measure_mode_t measure, bool track_allocations);
+ double prof_measure(prof_measurer_t* measurer, rb_trace_arg_t* trace_arg);

- prof_measurement_t *prof_measurement_create(void);
+ prof_measurement_t* prof_measurement_create(void);
  void prof_measurement_free(prof_measurement_t* measurement);
- VALUE prof_measurement_wrap(prof_measurement_t *measurement);
+ VALUE prof_measurement_wrap(prof_measurement_t* measurement);
  prof_measurement_t* prof_get_measurement(VALUE self);
- void prof_measurement_mark(void *data);
+ void prof_measurement_mark(void* data);

  void rp_init_measure(void);


data/ext/ruby_prof/rp_method.c
@@ -2,14 +2,13 @@
  Please see the LICENSE file for copyright and distribution information */

  #include "rp_allocation.h"
- #include "rp_call_info.h"
+ #include "rp_call_trees.h"
  #include "rp_method.h"

  VALUE cRpMethodInfo;

  /* ================ Helper Functions =================*/
- VALUE
- resolve_klass(VALUE klass, unsigned int *klass_flags)
+ VALUE resolve_klass(VALUE klass, unsigned int* klass_flags)
  {
  VALUE result = klass;

@@ -62,8 +61,7 @@ resolve_klass(VALUE klass, unsigned int *klass_flags)
  return result;
  }

- VALUE
- resolve_klass_name(VALUE klass, unsigned int* klass_flags)
+ VALUE resolve_klass_name(VALUE klass, unsigned int* klass_flags)
  {
  VALUE result = Qnil;

@@ -83,8 +81,7 @@ resolve_klass_name(VALUE klass, unsigned int* klass_flags)
  return result;
  }

- st_data_t
- method_key(VALUE klass, VALUE msym)
+ st_data_t method_key(VALUE klass, VALUE msym)
  {
  VALUE resolved_klass = klass;

@@ -104,21 +101,18 @@ method_key(VALUE klass, VALUE msym)
  }

  /* ====== Allocation Table ====== */
- st_table*
- allocations_table_create()
+ st_table* allocations_table_create()
  {
- return st_init_numtable();
+ return rb_st_init_numtable();
  }

- static int
- allocations_table_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
- {
+ static int allocations_table_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
+ {
  prof_allocation_free((prof_allocation_t*)value);
  return ST_CONTINUE;
  }

- static int
- prof_method_collect_allocations(st_data_t key, st_data_t value, st_data_t result)
+ static int prof_method_collect_allocations(st_data_t key, st_data_t value, st_data_t result)
  {
  prof_allocation_t* allocation = (prof_allocation_t*)value;
  VALUE arr = (VALUE)result;
@@ -126,24 +120,21 @@ prof_method_collect_allocations(st_data_t key, st_data_t value, st_data_t result
  return ST_CONTINUE;
  }

- static int
- prof_method_mark_allocations(st_data_t key, st_data_t value, st_data_t data)
+ static int prof_method_mark_allocations(st_data_t key, st_data_t value, st_data_t data)
  {
  prof_allocation_t* allocation = (prof_allocation_t*)value;
  prof_allocation_mark(allocation);
  return ST_CONTINUE;
  }

- void
- allocations_table_free(st_table* table)
+ void allocations_table_free(st_table* table)
  {
- st_foreach(table, allocations_table_free_iterator, 0);
- st_free_table(table);
+ rb_st_foreach(table, allocations_table_free_iterator, 0);
+ rb_st_free_table(table);
  }

  /* ================ prof_method_t =================*/
- static prof_method_t*
- prof_get_method(VALUE self)
+ prof_method_t* prof_get_method(VALUE self)
  {
  /* Can't use Data_Get_Struct because that triggers the event hook
  ending up in endless recursion. */
@@ -155,27 +146,22 @@ prof_get_method(VALUE self)
  return result;
  }

- prof_method_t*
- prof_method_create(VALUE klass, VALUE msym, VALUE source_file, int source_line)
+ prof_method_t* prof_method_create(VALUE klass, VALUE msym, VALUE source_file, int source_line)
  {
- prof_method_t *result = ALLOC(prof_method_t);
+ prof_method_t* result = ALLOC(prof_method_t);
  result->key = method_key(klass, msym);
  result->klass_flags = 0;

- /* Note we do not call resolve_klass_name now because that causes an object allocation that shows up
+ /* Note we do not call resolve_klass_name now because that causes an object allocation that shows up
  in the allocation results so we want to avoid it until after the profile run is complete. */
  result->klass = resolve_klass(klass, &result->klass_flags);
  result->klass_name = Qnil;
  result->method_name = msym;
  result->measurement = prof_measurement_create();

- result->root = false;
- result->excluded = false;
-
- result->parent_call_infos = method_table_create();
- result->child_call_infos = method_table_create();
+ result->call_trees = prof_call_trees_create();
  result->allocations_table = allocations_table_create();
-
+
  result->visits = 0;
  result->recursive = false;

@@ -187,180 +173,108 @@ prof_method_create(VALUE klass, VALUE msym, VALUE source_file, int source_line)
  return result;
  }

- prof_method_t*
- prof_method_create_excluded(VALUE klass, VALUE msym)
- {
- prof_method_t* result = prof_method_create(klass, msym, Qnil, 0);
- result->excluded = 1;
- return result;
- }
-
- static int
- prof_method_collect_call_infos(st_data_t key, st_data_t value, st_data_t result)
- {
- prof_call_info_t* call_info = (prof_call_info_t*)value;
- VALUE arr = (VALUE)result;
- rb_ary_push(arr, prof_call_info_wrap(call_info));
- return ST_CONTINUE;
- }
-
- static int
- prof_method_mark_call_infos(st_data_t key, st_data_t value, st_data_t data)
- {
- prof_call_info_t* call_info = (prof_call_info_t*)value;
- prof_call_info_mark(call_info);
- return ST_CONTINUE;
- }
-
- static int
- call_infos_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
- {
- prof_call_info_free((prof_call_info_t*)value);
- return ST_CONTINUE;
- }
-
- void
- call_info_table_free(st_table* table)
- {
- st_foreach(table, call_infos_free_iterator, 0);
- st_free_table(table);
- }
-
  /* The underlying c structures are freed when the parent profile is freed.
  However, on shutdown the Ruby GC frees objects in any will-nilly order.
  That means the ruby thread object wrapping the c thread struct may
  be freed before the parent profile. Thus we add in a free function
  for the garbage collector so that if it does get called will nil
  out our Ruby object reference.*/
- static void
- prof_method_ruby_gc_free(void *data)
+ static void prof_method_ruby_gc_free(void* data)
  {
  prof_method_t* method = (prof_method_t*)data;
+ method->object = Qnil;
+ }

+ static void prof_method_free(prof_method_t* method)
+ {
  /* Has this method object been accessed by Ruby? If
- yes clean it up so to avoid a segmentation fault. */
- if (method->object != Qnil)
- {
+ yes clean it up so to avoid a segmentation fault. */
+ if (method->object != Qnil)
+ {
  RDATA(method->object)->dmark = NULL;
  RDATA(method->object)->dfree = NULL;
  RDATA(method->object)->data = NULL;
  method->object = Qnil;
  }
- method->klass_name = Qnil;
- method->method_name = Qnil;
- }

- static void
- prof_method_free(prof_method_t* method)
- {
- prof_method_ruby_gc_free(method);
  allocations_table_free(method->allocations_table);
-
- /* Remember call infos are referenced by their parent method and child method, so we only want
- to iterate over one of them to avoid a double freeing */
- call_info_table_free(method->parent_call_infos);
- xfree(method->child_call_infos);
-
+ prof_call_trees_free(method->call_trees);
  prof_measurement_free(method->measurement);
  xfree(method);
  }

- size_t
- prof_method_size(const void *data)
+ size_t prof_method_size(const void* data)
  {
  return sizeof(prof_method_t);
  }

- void
- prof_method_mark(void *data)
+ void prof_method_mark(void* data)
  {
  prof_method_t* method = (prof_method_t*)data;
+
+ if (method->object != Qnil)
+ rb_gc_mark(method->object);
+
  rb_gc_mark(method->klass_name);
  rb_gc_mark(method->method_name);
-
+ rb_gc_mark(method->source_file);
+
  if (method->klass != Qnil)
  rb_gc_mark(method->klass);

- if (method->object != Qnil)
- rb_gc_mark(method->object);
-
  prof_measurement_mark(method->measurement);
-
- st_foreach(method->parent_call_infos, prof_method_mark_call_infos, 0);
- st_foreach(method->child_call_infos, prof_method_mark_call_infos, 0);
- st_foreach(method->allocations_table, prof_method_mark_allocations, 0);
+
+ rb_st_foreach(method->allocations_table, prof_method_mark_allocations, 0);
  }

- static VALUE
- prof_method_allocate(VALUE klass)
+ static VALUE prof_method_allocate(VALUE klass)
  {
  prof_method_t* method_data = prof_method_create(Qnil, Qnil, Qnil, 0);
  method_data->object = prof_method_wrap(method_data);
  return method_data->object;
  }

- VALUE
- prof_method_wrap(prof_method_t *method)
+ VALUE prof_method_wrap(prof_method_t* method)
  {
- if (method->object == Qnil)
- {
- method->object = Data_Wrap_Struct(cRpMethodInfo, prof_method_mark, prof_method_ruby_gc_free, method);
- }
- return method->object;
- }
-
- prof_method_t *
- prof_method_get(VALUE self)
- {
- /* Can't use Data_Get_Struct because that triggers the event hook
- ending up in endless recursion. */
- prof_method_t* result = DATA_PTR(self);
-
- if (!result)
+ if (method->object == Qnil)
  {
- rb_raise(rb_eRuntimeError, "This RubyProf::MethodInfo instance has already been freed, likely because its profile has been freed.");
- }
-
- return result;
+ method->object = Data_Wrap_Struct(cRpMethodInfo, prof_method_mark, prof_method_ruby_gc_free, method);
+ }
+ return method->object;
  }

- st_table *
- method_table_create()
+ st_table* method_table_create()
  {
- return st_init_numtable();
+ return rb_st_init_numtable();
  }

- static int
- method_table_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
+ static int method_table_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
  {
- prof_method_free((prof_method_t *)value);
+ prof_method_free((prof_method_t*)value);
  return ST_CONTINUE;
  }

- void
- method_table_free(st_table *table)
+ void method_table_free(st_table* table)
  {
- st_foreach(table, method_table_free_iterator, 0);
- st_free_table(table);
+ rb_st_foreach(table, method_table_free_iterator, 0);
+ rb_st_free_table(table);
  }

- size_t
- method_table_insert(st_table *table, st_data_t key, prof_method_t *val)
+ size_t method_table_insert(st_table* table, st_data_t key, prof_method_t* val)
  {
- return st_insert(table, (st_data_t) key, (st_data_t) val);
+ return rb_st_insert(table, (st_data_t)key, (st_data_t)val);
  }

- prof_method_t *
- method_table_lookup(st_table *table, st_data_t key)
+ prof_method_t* method_table_lookup(st_table* table, st_data_t key)
  {
  st_data_t val;
- if (st_lookup(table, (st_data_t)key, &val))
+ if (rb_st_lookup(table, (st_data_t)key, &val))
  {
- return (prof_method_t *) val;
+ return (prof_method_t*)val;
  }
  else
  {
- return NULL;
+ return NULL;
  }
  }

@@ -374,42 +288,15 @@ created. RubyProf::MethodInfo objects can be accessed via
  the RubyProf::Profile object.
  */

- /* call-seq:
- callers -> array
-
- Returns an array of call info objects that called this method (ie, parents).*/
- static VALUE
- prof_method_callers(VALUE self)
- {
- prof_method_t* method = prof_get_method(self);
- VALUE result = rb_ary_new();
- st_foreach(method->parent_call_infos, prof_method_collect_call_infos, result);
- return result;
- }
-
- /* call-seq:
- callees -> array
-
- Returns an array of call info objects that this method called (ie, children).*/
- static VALUE
- prof_method_callees(VALUE self)
- {
- prof_method_t* method = prof_get_method(self);
- VALUE result = rb_ary_new();
- st_foreach(method->child_call_infos, prof_method_collect_call_infos, result);
- return result;
- }
-
  /* call-seq:
  allocations -> array

  Returns an array of allocation information.*/
- static VALUE
- prof_method_allocations(VALUE self)
+ static VALUE prof_method_allocations(VALUE self)
  {
  prof_method_t* method = prof_get_method(self);
  VALUE result = rb_ary_new();
- st_foreach(method->allocations_table, prof_method_collect_allocations, result);
+ rb_st_foreach(method->allocations_table, prof_method_collect_allocations, result);
  return result;
  }

@@ -417,8 +304,7 @@ prof_method_allocations(VALUE self)
  called -> Measurement

  Returns the measurement associated with this method. */
- static VALUE
- prof_method_measurement(VALUE self)
+ static VALUE prof_method_measurement(VALUE self)
  {
  prof_method_t* method = prof_get_method(self);
  return prof_measurement_wrap(method->measurement);
@@ -431,7 +317,7 @@ return the source file of the method
  */
  static VALUE prof_method_source_file(VALUE self)
  {
- prof_method_t* method = prof_method_get(self);
+ prof_method_t* method = prof_get_method(self);
  return method->source_file;
  }

@@ -439,10 +325,9 @@ static VALUE prof_method_source_file(VALUE self)
  line_no -> int

  returns the line number of the method */
- static VALUE
- prof_method_line(VALUE self)
+ static VALUE prof_method_line(VALUE self)
  {
- prof_method_t* method = prof_method_get(self);
+ prof_method_t* method = prof_get_method(self);
  return INT2FIX(method->source_line);
  }

@@ -452,10 +337,9 @@ prof_method_line(VALUE self)
  Returns the name of this method's class. Singleton classes
  will have the form <Object::Object>. */

- static VALUE
- prof_method_klass_name(VALUE self)
+ static VALUE prof_method_klass_name(VALUE self)
  {
- prof_method_t *method = prof_method_get(self);
+ prof_method_t* method = prof_get_method(self);
  if (method->klass_name == Qnil)
  method->klass_name = resolve_klass_name(method->klass, &method->klass_flags);

@@ -467,10 +351,9 @@ prof_method_klass_name(VALUE self)

  Returns the klass flags */

- static VALUE
- prof_method_klass_flags(VALUE self)
+ static VALUE prof_method_klass_flags(VALUE self)
  {
- prof_method_t* method = prof_method_get(self);
+ prof_method_t* method = prof_get_method(self);
  return INT2FIX(method->klass_flags);
  }

@@ -480,49 +363,34 @@ prof_method_klass_flags(VALUE self)
  Returns the name of this method in the format Object#method. Singletons
  methods will be returned in the format <Object::Object>#method.*/

- static VALUE
- prof_method_name(VALUE self)
+ static VALUE prof_method_name(VALUE self)
  {
- prof_method_t *method = prof_method_get(self);
+ prof_method_t* method = prof_get_method(self);
  return method->method_name;
  }

- /* call-seq:
- root? -> boolean
-
- Returns the true if this method is at the top of the call stack */
- static VALUE
- prof_method_root(VALUE self)
- {
- prof_method_t *method = prof_method_get(self);
- return method->root ? Qtrue : Qfalse;
- }
-
  /* call-seq:
  recursive? -> boolean

  Returns the true if this method is recursively invoked */
- static VALUE
- prof_method_recursive(VALUE self)
+ static VALUE prof_method_recursive(VALUE self)
  {
- prof_method_t* method = prof_method_get(self);
+ prof_method_t* method = prof_get_method(self);
  return method->recursive ? Qtrue : Qfalse;
  }

  /* call-seq:
- excluded? -> boolean
+ call_trees -> CallTrees

- Returns the true if this method was excluded */
- static VALUE
- prof_method_excluded(VALUE self)
+ Returns the CallTrees associated with this method. */
+ static VALUE prof_method_call_trees(VALUE self)
  {
- prof_method_t* method = prof_method_get(self);
- return method->excluded ? Qtrue : Qfalse;
+ prof_method_t* method = prof_get_method(self);
+ return prof_call_trees_wrap(method->call_trees);
  }

  /* :nodoc: */
- static VALUE
- prof_method_dump(VALUE self)
+ static VALUE prof_method_dump(VALUE self)
  {
  prof_method_t* method_data = DATA_PTR(self);
  VALUE result = rb_hash_new();
@@ -532,25 +400,19 @@ prof_method_dump(VALUE self)
  rb_hash_aset(result, ID2SYM(rb_intern("method_name")), method_data->method_name);

  rb_hash_aset(result, ID2SYM(rb_intern("key")), INT2FIX(method_data->key));
- rb_hash_aset(result, ID2SYM(rb_intern("root")), prof_method_root(self));
  rb_hash_aset(result, ID2SYM(rb_intern("recursive")), prof_method_recursive(self));
- rb_hash_aset(result, ID2SYM(rb_intern("excluded")), prof_method_excluded(self));
  rb_hash_aset(result, ID2SYM(rb_intern("source_file")), method_data->source_file);
  rb_hash_aset(result, ID2SYM(rb_intern("source_line")), INT2FIX(method_data->source_line));

+ rb_hash_aset(result, ID2SYM(rb_intern("call_trees")), prof_call_trees_wrap(method_data->call_trees));
  rb_hash_aset(result, ID2SYM(rb_intern("measurement")), prof_measurement_wrap(method_data->measurement));
-
- rb_hash_aset(result, ID2SYM(rb_intern("callers")), prof_method_callers(self));
- rb_hash_aset(result, ID2SYM(rb_intern("callees")), prof_method_callees(self));
-
  rb_hash_aset(result, ID2SYM(rb_intern("allocations")), prof_method_allocations(self));

  return result;
  }

  /* :nodoc: */
- static VALUE
- prof_method_load(VALUE self, VALUE data)
+ static VALUE prof_method_load(VALUE self, VALUE data)
  {
  prof_method_t* method_data = RDATA(self)->data;
  method_data->object = self;
@@ -560,42 +422,24 @@ prof_method_load(VALUE self, VALUE data)
  method_data->method_name = rb_hash_aref(data, ID2SYM(rb_intern("method_name")));
  method_data->key = FIX2LONG(rb_hash_aref(data, ID2SYM(rb_intern("key"))));

- method_data->root = rb_hash_aref(data, ID2SYM(rb_intern("root"))) == Qtrue ? true : false;
  method_data->recursive = rb_hash_aref(data, ID2SYM(rb_intern("recursive"))) == Qtrue ? true : false;
- method_data->excluded = rb_hash_aref(data, ID2SYM(rb_intern("excluded"))) == Qtrue ? true : false;

  method_data->source_file = rb_hash_aref(data, ID2SYM(rb_intern("source_file")));
  method_data->source_line = FIX2INT(rb_hash_aref(data, ID2SYM(rb_intern("source_line"))));

+ VALUE call_trees = rb_hash_aref(data, ID2SYM(rb_intern("call_trees")));
+ method_data->call_trees = prof_get_call_trees(call_trees);
+
  VALUE measurement = rb_hash_aref(data, ID2SYM(rb_intern("measurement")));
  method_data->measurement = prof_get_measurement(measurement);

- VALUE callers = rb_hash_aref(data, ID2SYM(rb_intern("callers")));
- for (int i = 0; i < rb_array_len(callers); i++)
- {
- VALUE call_info = rb_ary_entry(callers, i);
- prof_call_info_t *call_info_data = prof_get_call_info(call_info);
- st_data_t key = call_info_data->parent ? call_info_data->parent->key : method_key(Qnil, 0);
- call_info_table_insert(method_data->parent_call_infos, key, call_info_data);
- }
-
- VALUE callees = rb_hash_aref(data, ID2SYM(rb_intern("callees")));
- for (int i = 0; i < rb_array_len(callees); i++)
- {
- VALUE call_info = rb_ary_entry(callees, i);
- prof_call_info_t *call_info_data = prof_get_call_info(call_info);
-
- st_data_t key = call_info_data->method ? call_info_data->method->key : method_key(Qnil, 0);
- call_info_table_insert(method_data->child_call_infos, key, call_info_data);
- }
-
  VALUE allocations = rb_hash_aref(data, ID2SYM(rb_intern("allocations")));
  for (int i = 0; i < rb_array_len(allocations); i++)
  {
  VALUE allocation = rb_ary_entry(allocations, i);
  prof_allocation_t* allocation_data = prof_allocation_get(allocation);

- st_insert(method_data->allocations_table, allocation_data->key, (st_data_t)allocation_data);
+ rb_st_insert(method_data->allocations_table, allocation_data->key, (st_data_t)allocation_data);
  }
  return data;
  }
@@ -609,22 +453,18 @@ void rp_init_method_info()

  rb_define_method(cRpMethodInfo, "klass_name", prof_method_klass_name, 0);
  rb_define_method(cRpMethodInfo, "klass_flags", prof_method_klass_flags, 0);
-
  rb_define_method(cRpMethodInfo, "method_name", prof_method_name, 0);
-
- rb_define_method(cRpMethodInfo, "callers", prof_method_callers, 0);
- rb_define_method(cRpMethodInfo, "callees", prof_method_callees, 0);
- rb_define_method(cRpMethodInfo, "allocations", prof_method_allocations, 0);

+ rb_define_method(cRpMethodInfo, "call_trees", prof_method_call_trees, 0);
+
+ rb_define_method(cRpMethodInfo, "allocations", prof_method_allocations, 0);
  rb_define_method(cRpMethodInfo, "measurement", prof_method_measurement, 0);
-
+
  rb_define_method(cRpMethodInfo, "source_file", prof_method_source_file, 0);
  rb_define_method(cRpMethodInfo, "line", prof_method_line, 0);

- rb_define_method(cRpMethodInfo, "root?", prof_method_root, 0);
  rb_define_method(cRpMethodInfo, "recursive?", prof_method_recursive, 0);
- rb_define_method(cRpMethodInfo, "excluded?", prof_method_excluded, 0);

  rb_define_method(cRpMethodInfo, "_dump_data", prof_method_dump, 0);
  rb_define_method(cRpMethodInfo, "_load_data", prof_method_load, 1);
- }
+ }
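
With callers and callees gone from prof_method_t, methods are reached through the method table keyed by method_key, and per-call-site data lives in the new call_trees structure. As a point of reference, here is a hedged sketch of the usual lookup-or-create flow built from the helpers shown above (find_or_create_method and its calling site are illustrative names, not code from the gem):

/* Sketch only: lookup-or-create against the method table, using the helpers
   shown in rp_method.c above. The calling site (e.g. the event hook in
   rp_profile.c) is assumed, not shown in this diff. */
static prof_method_t* find_or_create_method(st_table* method_table, VALUE klass, VALUE msym,
                                            VALUE source_file, int source_line)
{
    st_data_t key = method_key(klass, msym);
    prof_method_t* method = method_table_lookup(method_table, key);

    if (!method)
    {
        method = prof_method_create(klass, msym, source_file, source_line);
        method_table_insert(method_table, key, method);
    }

    return method;
}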