ruby-prof 1.6.2-x64-mingw-ucrt → 1.7.0-x64-mingw-ucrt

Sign up to get free protection for your applications and to get access to all the features.
Files changed (42):
  1. checksums.yaml +4 -4
  2. data/CHANGES +11 -0
  3. data/bin/ruby-prof +0 -5
  4. data/ext/ruby_prof/rp_allocation.c +342 -342
  5. data/ext/ruby_prof/rp_call_tree.c +1 -1
  6. data/ext/ruby_prof/rp_call_tree.h +1 -1
  7. data/ext/ruby_prof/rp_call_trees.c +2 -2
  8. data/ext/ruby_prof/rp_call_trees.h +2 -2
  9. data/ext/ruby_prof/rp_measure_allocations.c +1 -1
  10. data/ext/ruby_prof/rp_measure_memory.c +46 -46
  11. data/ext/ruby_prof/rp_measure_process_time.c +1 -1
  12. data/ext/ruby_prof/rp_measure_wall_time.c +1 -1
  13. data/ext/ruby_prof/rp_measurement.c +364 -364
  14. data/ext/ruby_prof/rp_method.c +24 -12
  15. data/ext/ruby_prof/rp_method.h +5 -2
  16. data/ext/ruby_prof/rp_profile.c +2 -2
  17. data/ext/ruby_prof/rp_profile.h +36 -36
  18. data/ext/ruby_prof/rp_stack.c +1 -1
  19. data/ext/ruby_prof/rp_thread.c +1 -1
  20. data/ext/ruby_prof/ruby_prof.c +1 -1
  21. data/ext/ruby_prof/ruby_prof.h +34 -34
  22. data/ext/ruby_prof/vc/ruby_prof.vcxproj +5 -7
  23. data/lib/3.2/ruby_prof.so +0 -0
  24. data/lib/3.3/ruby_prof.so +0 -0
  25. data/lib/ruby-prof/compatibility.rb +10 -10
  26. data/lib/ruby-prof/exclude_common_methods.rb +9 -3
  27. data/lib/ruby-prof/method_info.rb +87 -85
  28. data/lib/ruby-prof/version.rb +1 -1
  29. data/ruby-prof.gemspec +1 -1
  30. data/test/crash2.rb +144 -0
  31. data/test/enumerable_test.rb +5 -5
  32. data/test/exclude_methods_test.rb +197 -86
  33. data/test/line_number_test.rb +254 -99
  34. data/test/measure_allocations_test.rb +422 -1
  35. data/test/measure_memory_test.rb +433 -1
  36. data/test/measure_process_time_test.rb +882 -15
  37. data/test/measure_wall_time_test.rb +195 -47
  38. data/test/method_info_test.rb +1 -1
  39. data/test/recursive_test.rb +198 -1
  40. data/test/thread_test.rb +0 -4
  41. metadata +7 -6
  42. data/lib/3.1/ruby_prof.so +0 -0
@@ -480,7 +480,7 @@ static VALUE prof_call_tree_load(VALUE self, VALUE data)
     return data;
 }
 
-void rp_init_call_tree()
+void rp_init_call_tree(void)
 {
     /* CallTree */
     cRpCallTree = rb_define_class_under(mProf, "CallTree", rb_cObject);
@@ -42,6 +42,6 @@ prof_call_tree_t* prof_get_call_tree(VALUE self);
 VALUE prof_call_tree_wrap(prof_call_tree_t* call_tree);
 void prof_call_tree_free(prof_call_tree_t* call_tree);
 
-void rp_init_call_tree();
+void rp_init_call_tree(void);
 
 #endif //__RP_CALL_TREE_H__
@@ -21,7 +21,7 @@ prof_call_trees_t* prof_get_call_trees(VALUE self)
     return result;
 }
 
-prof_call_trees_t* prof_call_trees_create()
+prof_call_trees_t* prof_call_trees_create(void)
 {
     prof_call_trees_t* result = ALLOC(prof_call_trees_t);
     result->start = ALLOC_N(prof_call_tree_t*, INITIAL_CALL_TREES_SIZE);
@@ -279,7 +279,7 @@ VALUE prof_call_trees_load(VALUE self, VALUE data)
     return data;
 }
 
-void rp_init_call_trees()
+void rp_init_call_trees(void)
 {
     cRpCallTrees = rb_define_class_under(mProf, "CallTrees", rb_cObject);
     rb_undef_method(CLASS_OF(cRpCallTrees), "new");
@@ -18,8 +18,8 @@ typedef struct prof_call_trees_t
 } prof_call_trees_t;
 
 
-void rp_init_call_trees();
-prof_call_trees_t* prof_call_trees_create();
+void rp_init_call_trees(void);
+prof_call_trees_t* prof_call_trees_create(void);
 void prof_call_trees_free(prof_call_trees_t* call_trees);
 prof_call_trees_t* prof_get_call_trees(VALUE self);
 void prof_add_call_tree(prof_call_trees_t* call_trees, prof_call_tree_t* call_tree);
@@ -38,7 +38,7 @@ prof_measurer_t* prof_measurer_allocations(bool track_allocations)
     return measure;
 }
 
-void rp_init_measure_allocations()
+void rp_init_measure_allocations(void)
 {
     total_allocated_objects_key = ID2SYM(rb_intern("total_allocated_objects"));
     rb_define_const(mProf, "ALLOCATIONS", INT2NUM(MEASURE_ALLOCATIONS));
@@ -1,46 +1,46 @@
-/* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
-   Please see the LICENSE file for copyright and distribution information */
-
-/* :nodoc: */
-
-#include "rp_measurement.h"
-
-static VALUE cMeasureMemory;
-
-static double measure_memory(rb_trace_arg_t* trace_arg)
-{
-    static double result = 0;
-
-    if (trace_arg)
-    {
-        // Only process creation of new objects
-        rb_event_flag_t event = rb_tracearg_event_flag(trace_arg);
-        if (event == RUBY_INTERNAL_EVENT_NEWOBJ)
-        {
-            // Don't count allocations of internal IMemo objects
-            VALUE object = rb_tracearg_object(trace_arg);
-            if (BUILTIN_TYPE(object) != T_IMEMO)
-                result += rb_obj_memsize_of(object);
-        }
-    }
-
-    return result;
-}
-
-prof_measurer_t* prof_measurer_memory(bool track_allocations)
-{
-    prof_measurer_t* measure = ALLOC(prof_measurer_t);
-    measure->mode = MEASURE_MEMORY;
-    measure->measure = measure_memory;
-    measure->multiplier = 1;
-    // Need to track allocations to get RUBY_INTERNAL_EVENT_NEWOBJ event
-    measure->track_allocations = true;
-    return measure;
-}
-
-void rp_init_measure_memory()
-{
-    rb_define_const(mProf, "MEMORY", INT2NUM(MEASURE_MEMORY));
-
-    cMeasureMemory = rb_define_class_under(mMeasure, "Allocations", rb_cObject);
-}
+/* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+   Please see the LICENSE file for copyright and distribution information */
+
+/* :nodoc: */
+
+#include "rp_measurement.h"
+
+static VALUE cMeasureMemory;
+
+static double measure_memory(rb_trace_arg_t* trace_arg)
+{
+    static double result = 0;
+
+    if (trace_arg)
+    {
+        // Only process creation of new objects
+        rb_event_flag_t event = rb_tracearg_event_flag(trace_arg);
+        if (event == RUBY_INTERNAL_EVENT_NEWOBJ)
+        {
+            // Don't count allocations of internal IMemo objects
+            VALUE object = rb_tracearg_object(trace_arg);
+            if (BUILTIN_TYPE(object) != T_IMEMO)
+                result += rb_obj_memsize_of(object);
+        }
+    }
+
+    return result;
+}
+
+prof_measurer_t* prof_measurer_memory(bool track_allocations)
+{
+    prof_measurer_t* measure = ALLOC(prof_measurer_t);
+    measure->mode = MEASURE_MEMORY;
+    measure->measure = measure_memory;
+    measure->multiplier = 1;
+    // Need to track allocations to get RUBY_INTERNAL_EVENT_NEWOBJ event
+    measure->track_allocations = true;
+    return measure;
+}
+
+void rp_init_measure_memory(void)
+{
+    rb_define_const(mProf, "MEMORY", INT2NUM(MEASURE_MEMORY));
+
+    cMeasureMemory = rb_define_class_under(mMeasure, "Allocations", rb_cObject);
+}
@@ -57,7 +57,7 @@ prof_measurer_t* prof_measurer_process_time(bool track_allocations)
     return measure;
 }
 
-void rp_init_measure_process_time()
+void rp_init_measure_process_time(void)
 {
     rb_define_const(mProf, "CLOCKS_PER_SEC", INT2NUM(CLOCKS_PER_SEC));
     rb_define_const(mProf, "PROCESS_TIME", INT2NUM(MEASURE_PROCESS_TIME));
@@ -56,7 +56,7 @@ prof_measurer_t* prof_measurer_wall_time(bool track_allocations)
     return measure;
 }
 
-void rp_init_measure_wall_time()
+void rp_init_measure_wall_time(void)
 {
     rb_define_const(mProf, "WALL_TIME", INT2NUM(MEASURE_WALL_TIME));