ruby-prof 0.16.2 → 0.17.0

Files changed (81)
  1. checksums.yaml +4 -4
  2. data/CHANGES +15 -0
  3. data/README.rdoc +36 -5
  4. data/bin/ruby-prof +7 -2
  5. data/doc/LICENSE.html +2 -1
  6. data/doc/README_rdoc.html +42 -8
  7. data/doc/Rack.html +2 -1
  8. data/doc/Rack/RubyProf.html +25 -18
  9. data/doc/Rack/RubyProf/RackProfiler.html +343 -0
  10. data/doc/RubyProf.html +14 -2
  11. data/doc/RubyProf/AbstractPrinter.html +91 -12
  12. data/doc/RubyProf/AggregateCallInfo.html +2 -1
  13. data/doc/RubyProf/CallInfo.html +18 -78
  14. data/doc/RubyProf/CallInfoPrinter.html +2 -1
  15. data/doc/RubyProf/CallInfoVisitor.html +2 -1
  16. data/doc/RubyProf/CallStackPrinter.html +35 -29
  17. data/doc/RubyProf/CallTreePrinter.html +98 -14
  18. data/doc/RubyProf/Cmd.html +11 -5
  19. data/doc/RubyProf/DeprecationWarnings.html +148 -0
  20. data/doc/RubyProf/DotPrinter.html +2 -1
  21. data/doc/RubyProf/FlatPrinter.html +2 -1
  22. data/doc/RubyProf/FlatPrinterWithLineNumbers.html +7 -5
  23. data/doc/RubyProf/GraphHtmlPrinter.html +18 -12
  24. data/doc/RubyProf/GraphPrinter.html +2 -1
  25. data/doc/RubyProf/MethodInfo.html +19 -88
  26. data/doc/RubyProf/MultiPrinter.html +231 -17
  27. data/doc/RubyProf/Profile.html +184 -39
  28. data/doc/RubyProf/Profile/ExcludeCommonMethods.html +411 -0
  29. data/doc/RubyProf/Profile/LegacyMethodElimination.html +158 -0
  30. data/doc/RubyProf/ProfileTask.html +2 -1
  31. data/doc/RubyProf/Thread.html +4 -39
  32. data/doc/created.rid +21 -19
  33. data/doc/css/fonts.css +6 -6
  34. data/doc/examples/flat_txt.html +2 -1
  35. data/doc/examples/graph_html.html +2 -1
  36. data/doc/examples/graph_txt.html +2 -1
  37. data/doc/index.html +47 -7
  38. data/doc/js/darkfish.js +7 -7
  39. data/doc/js/search_index.js +1 -1
  40. data/doc/js/search_index.js.gz +0 -0
  41. data/doc/js/searcher.js +1 -0
  42. data/doc/js/searcher.js.gz +0 -0
  43. data/doc/table_of_contents.html +190 -80
  44. data/ext/ruby_prof/extconf.rb +4 -0
  45. data/ext/ruby_prof/rp_call_info.c +19 -1
  46. data/ext/ruby_prof/rp_call_info.h +8 -3
  47. data/ext/ruby_prof/rp_method.c +282 -57
  48. data/ext/ruby_prof/rp_method.h +28 -5
  49. data/ext/ruby_prof/rp_stack.c +69 -24
  50. data/ext/ruby_prof/rp_stack.h +21 -9
  51. data/ext/ruby_prof/rp_thread.c +4 -1
  52. data/ext/ruby_prof/ruby_prof.c +142 -39
  53. data/ext/ruby_prof/ruby_prof.h +3 -0
  54. data/lib/ruby-prof.rb +10 -0
  55. data/lib/ruby-prof/call_info.rb +0 -11
  56. data/lib/ruby-prof/method_info.rb +4 -12
  57. data/lib/ruby-prof/printers/abstract_printer.rb +19 -1
  58. data/lib/ruby-prof/printers/call_info_printer.rb +1 -1
  59. data/lib/ruby-prof/printers/call_stack_printer.rb +9 -4
  60. data/lib/ruby-prof/printers/call_tree_printer.rb +15 -2
  61. data/lib/ruby-prof/printers/flat_printer_with_line_numbers.rb +23 -4
  62. data/lib/ruby-prof/printers/graph_html_printer.rb +10 -5
  63. data/lib/ruby-prof/printers/graph_printer.rb +2 -2
  64. data/lib/ruby-prof/printers/multi_printer.rb +44 -18
  65. data/lib/ruby-prof/profile.rb +13 -42
  66. data/lib/ruby-prof/profile/exclude_common_methods.rb +201 -0
  67. data/lib/ruby-prof/profile/legacy_method_elimination.rb +49 -0
  68. data/lib/ruby-prof/rack.rb +130 -51
  69. data/lib/ruby-prof/thread.rb +0 -6
  70. data/lib/ruby-prof/version.rb +1 -1
  71. data/ruby-prof.gemspec +4 -3
  72. data/test/aggregate_test.rb +1 -1
  73. data/test/exclude_methods_test.rb +146 -0
  74. data/test/line_number_test.rb +12 -3
  75. data/test/multi_printer_test.rb +23 -2
  76. data/test/no_method_class_test.rb +1 -1
  77. data/test/printers_test.rb +21 -1
  78. data/test/rack_test.rb +64 -0
  79. data/test/recursive_test.rb +15 -15
  80. data/test/test_helper.rb +11 -0
  81. metadata +20 -13
data/ext/ruby_prof/rp_method.h

@@ -16,18 +16,39 @@ typedef struct
     st_index_t key; /* Cache calculated key */
 } prof_method_key_t;

+/* Source relation bit offsets. */
+enum {
+    kModuleIncludee = 0, /* Included module */
+    kModuleSingleton,    /* Singleton class of a module */
+    kObjectSingleton     /* Singleton class of an object */
+};

 /* Forward declaration, see rp_call_info.h */
 struct prof_call_infos_t;

 /* Profiling information for each method. */
-typedef struct
+/* Excluded methods have no call_infos, source_klass, or source_file. */
+typedef struct
 {
-    prof_method_key_t *key; /* Method key */
-    const char *source_file; /* The method's source file */
-    int line; /* The method's line number. */
-    struct prof_call_infos_t *call_infos; /* Call info objects for this method */
+    /* Hot */
+
+    prof_method_key_t *key; /* Table key */
+
+    struct prof_call_infos_t *call_infos; /* Call infos */
+    int visits; /* Current visits on the stack */
+
+    unsigned int excluded : 1; /* Exclude from profile? */
+    unsigned int recursive : 1; /* Recursive (direct or mutual)? */
+
+    /* Cold */
+
     VALUE object; /* Cached ruby object */
+    VALUE source_klass; /* Source class */
+    const char *source_file; /* Source file */
+    int line; /* Line number */
+
+    unsigned int resolved : 1; /* Source resolved? */
+    unsigned int relation : 3; /* Source relation bits */
 } prof_method_t;

 void rp_init_method_info(void);

@@ -40,6 +61,8 @@ size_t method_table_insert(st_table *table, const prof_method_key_t *key, prof_m
 void method_table_free(st_table *table);

 prof_method_t* prof_method_create(VALUE klass, ID mid, const char* source_file, int line);
+prof_method_t* prof_method_create_excluded(VALUE klass, ID mid);
+
 VALUE prof_method_wrap(prof_method_t *result);
 void prof_method_mark(prof_method_t *method);

data/ext/ruby_prof/rp_stack.c

@@ -43,13 +43,15 @@ prof_stack_free(prof_stack_t *stack)
 }

 prof_frame_t *
-prof_stack_push(prof_stack_t *stack, double measurement)
+prof_stack_push(prof_stack_t *stack, prof_call_info_t *call_info, double measurement, int paused)
 {
-    prof_frame_t* result = NULL;
+    prof_frame_t *result;
+    prof_frame_t* parent_frame;
+    prof_method_t *method;

     /* Is there space on the stack? If not, double
        its size. */
-    if (stack->ptr == stack->end )
+    if (stack->ptr == stack->end)
     {
         size_t len = stack->ptr - stack->start;
         size_t new_capacity = (stack->end - stack->start) * 2;

@@ -59,17 +61,42 @@ prof_stack_push(prof_stack_t *stack, double measurement)
         stack->end = stack->start + new_capacity;
     }

-    // Setup returned stack pointer to be valid
-    result = stack->ptr;
-    result->child_time = 0;
+    parent_frame = prof_stack_peek(stack);
+
+    // Reserve the next available frame pointer.
+    result = stack->ptr++;
+
+    result->call_info = call_info;
+    result->call_info->depth = (int)(stack->ptr - stack->start); // shortening of 64 bit into 32;
+    result->passes = 0;
+
+    result->start_time = measurement;
+    result->pause_time = -1; // init as not paused.
     result->switch_time = 0;
     result->wait_time = 0;
+    result->child_time = 0;
     result->dead_time = 0;
-    result->depth = (int)(stack->ptr - stack->start); // shortening of 64 bit into 32
-    result->start_time = measurement;

-    // Increment the stack ptr for next time
-    stack->ptr++;
+    method = call_info->target;
+
+    /* If the method was visited previously, it's recursive. */
+    if (method->visits > 0)
+    {
+        method->recursive = 1;
+        call_info->recursive = 1;
+    }
+    /* Enter the method. */
+    method->visits++;
+
+    // Unpause the parent frame, if it exists.
+    // If currently paused then:
+    // 1) The child frame will begin paused.
+    // 2) The parent will inherit the child's dead time.
+    prof_frame_unpause(parent_frame, measurement);
+
+    if (paused) {
+        prof_frame_pause(result, measurement);
+    }

     // Return the result
     return result;

@@ -78,21 +105,33 @@ prof_stack_push(prof_stack_t *stack, double measurement)
 prof_frame_t *
 prof_stack_pop(prof_stack_t *stack, double measurement)
 {
-    prof_frame_t *frame = NULL;
-    prof_frame_t* parent_frame = NULL;
+    prof_frame_t *frame;
+    prof_frame_t *parent_frame;
     prof_call_info_t *call_info;
+    prof_method_t *method;

     double total_time;
     double self_time;

-    /* Frame can be null. This can happen if RubProf.start is called from
-       a method that exits. And it can happen if an exception is raised
-       in code that is being profiled and the stack unwinds (RubyProf is
-       not notified of that by the ruby runtime. */
-    if (stack->ptr == stack->start)
+    frame = prof_stack_peek(stack);
+
+    /* Frame can be null, which means the stack is empty. This can happen if
+       RubProf.start is called from a method that exits. And it can happen if an
+       exception is raised in code that is being profiled and the stack unwinds
+       (RubyProf is not notified of that by the ruby runtime. */
+    if (!frame) {
         return NULL;
-
-    frame = --stack->ptr;
+    }
+
+    /* Match passes until we reach the frame itself. */
+    if (prof_frame_is_pass(frame)) {
+        frame->passes--;
+        /* Additional frames can be consumed. See pop_frames(). */
+        return frame;
+    }
+
+    /* Consume this frame. */
+    stack->ptr--;

     /* Calculate the total time this method took */
     prof_frame_unpause(frame, measurement);

@@ -101,11 +140,16 @@ prof_stack_pop(prof_stack_t *stack, double measurement)

     /* Update information about the current method */
     call_info = frame->call_info;
+    method = call_info->target;
+
     call_info->called++;
     call_info->total_time += total_time;
     call_info->self_time += self_time;
     call_info->wait_time += frame->wait_time;

+    /* Leave the method. */
+    method->visits--;
+
     parent_frame = prof_stack_peek(stack);
     if (parent_frame)
     {

@@ -119,10 +163,11 @@ prof_stack_pop(prof_stack_t *stack, double measurement)
 }

 prof_frame_t *
-prof_stack_peek(prof_stack_t *stack)
+prof_stack_pass(prof_stack_t *stack)
 {
-    if (stack->ptr == stack->start)
-        return NULL;
-    else
-        return stack->ptr - 1;
+    prof_frame_t *frame = prof_stack_peek(stack);
+    if (frame) {
+        frame->passes++;
+    }
+    return frame;
 }
data/ext/ruby_prof/rp_stack.h

@@ -13,39 +13,51 @@
 /* Temporary object that maintains profiling information
    for active methods. They are created and destroyed
    as the program moves up and down its stack. */
-typedef struct
+typedef struct
 {
     /* Caching prof_method_t values significantly
        increases performance. */
     prof_call_info_t *call_info;
+
+    unsigned int line;
+    unsigned int passes; /* Count of "pass" frames, _after_ this one. */
+
     double start_time;
     double switch_time; /* Time at switch to different thread */
     double wait_time;
     double child_time;
     double pause_time; // Time pause() was initiated
     double dead_time; // Time to ignore (i.e. total amount of time between pause/resume blocks)
-    int depth;
-    unsigned int line;
 } prof_frame_t;

+#define prof_frame_is_real(f) ((f)->passes == 0)
+#define prof_frame_is_pass(f) ((f)->passes > 0)
+
 #define prof_frame_is_paused(f) (f->pause_time >= 0)
 #define prof_frame_is_unpaused(f) (f->pause_time < 0)
+
 void prof_frame_pause(prof_frame_t*, double current_measurement);
 void prof_frame_unpause(prof_frame_t*, double current_measurement);

-
 /* Current stack of active methods.*/
-typedef struct
+typedef struct
 {
     prof_frame_t *start;
     prof_frame_t *end;
     prof_frame_t *ptr;
 } prof_stack_t;

-prof_stack_t * prof_stack_create();
+prof_stack_t *prof_stack_create();
 void prof_stack_free(prof_stack_t *stack);
-prof_frame_t * prof_stack_push(prof_stack_t *stack, double measurement);
-prof_frame_t * prof_stack_pop(prof_stack_t *stack, double measurement);
-prof_frame_t * prof_stack_peek(prof_stack_t *stack);
+
+prof_frame_t *prof_stack_push(prof_stack_t *stack, prof_call_info_t *call_info, double measurement, int paused);
+prof_frame_t *prof_stack_pop(prof_stack_t *stack, double measurement);
+prof_frame_t *prof_stack_pass(prof_stack_t *stack);
+
+static inline prof_frame_t *
+prof_stack_peek(prof_stack_t *stack) {
+    return stack->ptr != stack->start ? stack->ptr - 1 : NULL;
+}
+


 #endif //__RP_STACK__
data/ext/ruby_prof/rp_thread.c

@@ -217,7 +217,10 @@ collect_methods(st_data_t key, st_data_t value, st_data_t result)
        We want to store the method info information into an array.*/
     VALUE methods = (VALUE) result;
     prof_method_t *method = (prof_method_t *) value;
-    rb_ary_push(methods, prof_method_wrap(method));
+
+    if (!method->excluded) {
+        rb_ary_push(methods, prof_method_wrap(method));
+    }

     return ST_CONTINUE;
 }
data/ext/ruby_prof/ruby_prof.c

@@ -28,6 +28,7 @@

 VALUE mProf;
 VALUE cProfile;
+VALUE cExcludeCommonMethods;

 static prof_profile_t*
 prof_get_profile(VALUE self)

@@ -70,6 +71,19 @@ get_event_name(rb_event_flag_t event)
     }
 }

+static int
+excludes_method(prof_method_key_t *key, prof_profile_t *profile)
+{
+    return (profile->exclude_methods_tbl &&
+            method_table_lookup(profile->exclude_methods_tbl, key) != NULL);
+}
+
+static void
+prof_exclude_common_methods(VALUE profile)
+{
+    rb_funcall(cExcludeCommonMethods, rb_intern("apply!"), 1, profile);
+}
+
 static prof_method_t*
 create_method(rb_event_flag_t event, VALUE klass, ID mid, const char* source_file, int line)
 {

@@ -83,24 +97,36 @@ create_method(rb_event_flag_t event, VALUE klass, ID mid, const char* source_fil
     return prof_method_create(klass, mid, source_file, line);
 }

-
 static prof_method_t*
-get_method(rb_event_flag_t event, VALUE klass, ID mid, thread_data_t* thread_data)
+get_method(rb_event_flag_t event, VALUE klass, ID mid, thread_data_t *thread_data, prof_profile_t *profile)
 {
     prof_method_key_t key;
     prof_method_t *method = NULL;

+    /* Probe the local table. */
     method_key(&key, klass, mid);
     method = method_table_lookup(thread_data->method_table, &key);

     if (!method)
     {
-        const char* source_file = rb_sourcefile();
-        int line = rb_sourceline();
+        /* Didn't find it; are we excluding it specifically? */
+        if (excludes_method(&key, profile)) {
+            /* We found a exclusion sentinel so propagate it into the thread's local hash table. */
+            /* TODO(nelgau): Is there a way to avoid this allocation completely so that all these
+               tables share the same exclusion method struct? The first attempt failed due to my
+               ignorance of the whims of the GC. */
+            method = prof_method_create_excluded(klass, mid);
+        } else {
+            /* This method has no entry for this thread/fiber and isn't specifically excluded. */
+            const char* source_file = rb_sourcefile();
+            int line = rb_sourceline();
+            method = create_method(event, klass, mid, source_file, line);
+        }

-        method = create_method(event, klass, mid, source_file, line);
+        /* Insert the newly created method, or the exlcusion sentinel. */
        method_table_insert(thread_data->method_table, method->key, method);
     }
+
     return method;
 }

@@ -238,10 +264,11 @@ prof_event_hook(rb_event_flag_t event, VALUE data, VALUE self, ID mid, VALUE kla
        /* Keep track of the current line number in this method. When
           a new method is called, we know what line number it was
           called from. */
-
        if (frame)
        {
-            frame->line = rb_sourceline();
+            if (prof_frame_is_real(frame)) {
+                frame->line = rb_sourceline();
+            }
            break;
        }

@@ -252,10 +279,16 @@ prof_event_hook(rb_event_flag_t event, VALUE data, VALUE self, ID mid, VALUE kla
    case RUBY_EVENT_CALL:
    case RUBY_EVENT_C_CALL:
    {
-        prof_call_info_t *call_info = NULL;
-        prof_method_t *method = NULL;
+        prof_frame_t *next_frame;
+        prof_call_info_t *call_info;
+        prof_method_t *method;

-        method = get_method(event, klass, mid, thread_data);
+        method = get_method(event, klass, mid, thread_data, profile);
+
+        if (method->excluded) {
+            prof_stack_pass(thread_data->stack);
+            break;
+        }

        if (!frame)
        {

@@ -274,26 +307,18 @@ prof_event_hook(rb_event_flag_t event, VALUE data, VALUE self, ID mid, VALUE kla
                call_info_table_insert(frame->call_info->call_infos, method->key, call_info);
                prof_add_call_info(method->call_infos, call_info);
            }
-
-            // Unpause the parent frame. If currently paused then:
-            // 1) The child frame will begin paused.
-            // 2) The parent will inherit the child's dead time.
-            prof_frame_unpause(frame, measurement);
        }

        /* Push a new frame onto the stack for a new c-call or ruby call (into a method) */
-        frame = prof_stack_push(thread_data->stack, measurement);
-        frame->call_info = call_info;
-        frame->call_info->depth = frame->depth;
-        frame->pause_time = profile->paused == Qtrue ? measurement : -1;
-        frame->line = rb_sourceline();
+        next_frame = prof_stack_push(thread_data->stack, call_info, measurement, RTEST(profile->paused));
+        next_frame->line = rb_sourceline();
        break;
    }
    case RUBY_EVENT_RETURN:
    case RUBY_EVENT_C_RETURN:
    {
-        prof_stack_pop(thread_data->stack, measurement);
-        break;
+        prof_stack_pop(thread_data->stack, measurement);
+        break;
    }
    }
 }

@@ -307,10 +332,19 @@ prof_install_hook(VALUE self)
                      RUBY_EVENT_LINE, self);
 }

+#ifdef HAVE_RB_REMOVE_EVENT_HOOK_WITH_DATA
+extern int
+rb_remove_event_hook_with_data(rb_event_hook_func_t func, VALUE data);
+#endif
+
 void
-prof_remove_hook()
+prof_remove_hook(VALUE self)
 {
+#ifdef HAVE_RB_REMOVE_EVENT_HOOK_WITH_DATA
+    rb_remove_event_hook_with_data(prof_event_hook, self);
+#else
    rb_remove_event_hook(prof_event_hook);
+#endif
 }

 static int

@@ -331,10 +365,19 @@ mark_threads(st_data_t key, st_data_t value, st_data_t result)
    return ST_CONTINUE;
 }

+static int
+mark_methods(st_data_t key, st_data_t value, st_data_t result)
+{
+    prof_method_t *method = (prof_method_t *) value;
+    prof_method_mark(method);
+    return ST_CONTINUE;
+}
+
 static void
 prof_mark(prof_profile_t *profile)
 {
    st_foreach(profile->threads_tbl, mark_threads, 0);
+    st_foreach(profile->exclude_methods_tbl, mark_methods, 0);
 }

 /* Freeing the profile creates a cascade of freeing.

@@ -358,6 +401,10 @@ prof_free(prof_profile_t *profile)
        profile->include_threads_tbl = NULL;
    }

+    /* This table owns the excluded sentinels for now. */
+    method_table_free(profile->exclude_methods_tbl);
+    profile->exclude_methods_tbl = NULL;
+
    xfree(profile->measurer);
    profile->measurer = NULL;

@@ -375,6 +422,8 @@ prof_allocate(VALUE klass)
    profile->include_threads_tbl = NULL;
    profile->running = Qfalse;
    profile->merge_fibers = 0;
+    profile->exclude_methods_tbl = method_table_create();
+    profile->running = Qfalse;
    return result;
 }

@@ -401,6 +450,7 @@ prof_initialize(int argc, VALUE *argv, VALUE self)
    VALUE exclude_threads = Qnil;
    VALUE include_threads = Qnil;
    VALUE merge_fibers = Qnil;
+    VALUE exclude_common = Qnil;
    int i;

    switch (rb_scan_args(argc, argv, "02", &mode_or_options, &exclude_threads)) {

@@ -414,6 +464,7 @@ prof_initialize(int argc, VALUE *argv, VALUE self)
        Check_Type(mode_or_options, T_HASH);
        mode = rb_hash_aref(mode_or_options, ID2SYM(rb_intern("measure_mode")));
        merge_fibers = rb_hash_aref(mode_or_options, ID2SYM(rb_intern("merge_fibers")));
+        exclude_common = rb_hash_aref(mode_or_options, ID2SYM(rb_intern("exclude_common")));
        exclude_threads = rb_hash_aref(mode_or_options, ID2SYM(rb_intern("exclude_threads")));
        include_threads = rb_hash_aref(mode_or_options, ID2SYM(rb_intern("include_threads")));
    }

@@ -453,6 +504,10 @@ prof_initialize(int argc, VALUE *argv, VALUE self)
        }
    }

+    if (RTEST(exclude_common)) {
+        prof_exclude_common_methods(self);
+    }
+
    return self;
 }

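The hunk above wires a new exclude_common option into Profile#initialize, alongside the existing measure_mode, merge_fibers, exclude_threads, and include_threads keys; when it is truthy, prof_exclude_common_methods calls ExcludeCommonMethods.apply! on the profile before the event hook is installed. A minimal usage sketch in Ruby follows (the WALL_TIME constant, the profiled loop, and the FlatPrinter call are illustrative assumptions drawn from the documented ruby-prof API, not part of this diff):

    require 'ruby-prof'

    # Build a profile that filters out common framework noise
    # (enumeration and method_missing plumbing) via the new
    # ExcludeCommonMethods hook added in 0.17.0.
    profile = RubyProf::Profile.new(
      measure_mode:   RubyProf::WALL_TIME,  # assumed constant from the public API
      exclude_common: true                  # new option read in prof_initialize
    )

    profile.start
    1000.times { Math.sqrt(rand) }
    result = profile.stop

    # One of the bundled printers listed in the docs above.
    RubyProf::FlatPrinter.new(result).print(STDOUT)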
@@ -581,11 +636,11 @@ prof_stop(VALUE self)
    {
        rb_raise(rb_eRuntimeError, "RubyProf.start was not yet called");
    }
-
-    prof_remove_hook();
+
+    prof_remove_hook(self);

    /* close trace file if open */
-    if (trace_file != NULL)
+    if (trace_file != NULL)
    {
        if (trace_file !=stderr && trace_file != stdout)
        {

@@ -597,7 +652,7 @@ prof_stop(VALUE self)
        }
        trace_file = NULL;
    }
-
+
    prof_pop_threads(profile);

    /* Unset the last_thread_data (very important!)

@@ -605,12 +660,23 @@ prof_stop(VALUE self)
    profile->running = profile->paused = Qfalse;
    profile->last_thread_data = NULL;

-    /* Post process result */
-    rb_funcall(self, rb_intern("post_process") , 0);
-
    return self;
 }

+/* call-seq:
+   threads -> Array of RubyProf::Thread
+
+   Returns an array of RubyProf::Thread instances that were executed
+   while the the program was being run. */
+static VALUE
+prof_threads(VALUE self)
+{
+    VALUE result = rb_ary_new();
+    prof_profile_t* profile = prof_get_profile(self);
+    st_foreach(profile->threads_tbl, collect_threads, result);
+    return result;
+}
+
 /* call-seq:
    profile(&block) -> self
    profile(options, &block) -> self

@@ -619,7 +685,7 @@ Profiles the specified block and returns a RubyProf::Profile
 object. Arguments are passed to Profile initialize method.
 */
 static VALUE
-prof_profile(int argc, VALUE *argv, VALUE klass)
+prof_profile_class(int argc, VALUE *argv, VALUE klass)
 {
    int result;
    VALUE profile = rb_class_new_instance(argc, argv, cProfile);

@@ -635,17 +701,47 @@ prof_profile(int argc, VALUE *argv, VALUE klass)
 }

 /* call-seq:
-   threads -> array of RubyProf::Thread
+   profile {block} -> RubyProf::Result

-   Returns an array of RubyProf::Thread instances that were executed
-   while the the program was being run. */
+   Profiles the specified block and returns a RubyProf::Result object. */
 static VALUE
-prof_threads(VALUE self)
+prof_profile_object(VALUE self)
+{
+    int result;
+    if (!rb_block_given_p())
+    {
+        rb_raise(rb_eArgError, "A block must be provided to the profile method.");
+    }
+
+    prof_start(self);
+    rb_protect(rb_yield, self, &result);
+    return prof_stop(self);
+
+}
+
+static VALUE
+prof_exclude_method(VALUE self, VALUE klass, VALUE sym)
 {
-    VALUE result = rb_ary_new();
    prof_profile_t* profile = prof_get_profile(self);
-    st_foreach(profile->threads_tbl, collect_threads, result);
-    return result;
+    ID mid = SYM2ID(sym);
+
+    prof_method_key_t key;
+    prof_method_t *method;
+
+    if (profile->running == Qtrue)
+    {
+        rb_raise(rb_eRuntimeError, "RubyProf.start was already called");
+    }
+
+    method_key(&key, klass, mid);
+    method = method_table_lookup(profile->exclude_methods_tbl, &key);
+
+    if (!method) {
+        method = prof_method_create_excluded(klass, mid);
+        method_table_insert(profile->exclude_methods_tbl, method->key, method);
+    }
+
+    return self;
 }

 void Init_ruby_prof()
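prof_exclude_method above backs the new Profile#exclude_method!(klass, method_name) API: it records an exclusion sentinel in exclude_methods_tbl and raises if the profile is already running, so exclusions have to be registered before start. A short sketch (the specific methods excluded here are arbitrary examples, not recommendations from the diff):

    require 'ruby-prof'

    profile = RubyProf::Profile.new

    # Exclusions must be set up before profiling begins; calling
    # exclude_method! on a running profile raises
    # "RubyProf.start was already called".
    profile.exclude_method!(Integer, :times)
    profile.exclude_method!(Kernel, :sleep)

    profile.start
    10.times { sleep 0.01 }
    profile.stop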
@@ -658,8 +754,8 @@ void Init_ruby_prof()
    rp_init_thread();

    cProfile = rb_define_class_under(mProf, "Profile", rb_cObject);
-    rb_define_singleton_method(cProfile, "profile", prof_profile, -1);
    rb_define_alloc_func (cProfile, prof_allocate);
+
    rb_define_method(cProfile, "initialize", prof_initialize, -1);
    rb_define_method(cProfile, "start", prof_start, 0);
    rb_define_method(cProfile, "stop", prof_stop, 0);

@@ -668,4 +764,11 @@ void Init_ruby_prof()
    rb_define_method(cProfile, "running?", prof_running, 0);
    rb_define_method(cProfile, "paused?", prof_paused, 0);
    rb_define_method(cProfile, "threads", prof_threads, 0);
+
+    rb_define_singleton_method(cProfile, "profile", prof_profile_class, -1);
+    rb_define_method(cProfile, "profile", prof_profile_object, 0);
+
+    rb_define_method(cProfile, "exclude_method!", prof_exclude_method, 2);
+
+    cExcludeCommonMethods = rb_define_class_under(cProfile, "ExcludeCommonMethods", rb_cObject);
 }
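Init_ruby_prof now registers profile twice: a class-level RubyProf::Profile.profile (the renamed prof_profile_class), which forwards its arguments to initialize and profiles the given block, and an instance-level Profile#profile (prof_profile_object), which requires a block and wraps it in start/stop. A sketch of both forms (the block body, the exclude_common option forwarding, and the GraphPrinter call are assumptions for illustration):

    require 'ruby-prof'

    # Class-level form: options are passed through to Profile#initialize,
    # then the block is profiled and the stopped profile is returned.
    result = RubyProf::Profile.profile(exclude_common: true) do
      (1..10_000).reduce(:+)
    end

    # Instance-level form: build and configure the profile first,
    # then hand it the block to measure.
    profile = RubyProf::Profile.new
    result = profile.profile do
      (1..10_000).reduce(:+)
    end

    RubyProf::GraphPrinter.new(result).print(STDOUT)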