ruby-prof 1.4.4-x64-mingw-ucrt

Files changed (106)
  1. checksums.yaml +7 -0
  2. data/CHANGES +608 -0
  3. data/LICENSE +25 -0
  4. data/README.md +5 -0
  5. data/Rakefile +98 -0
  6. data/bin/ruby-prof +328 -0
  7. data/bin/ruby-prof-check-trace +45 -0
  8. data/ext/ruby_prof/extconf.rb +22 -0
  9. data/ext/ruby_prof/rp_aggregate_call_tree.c +59 -0
  10. data/ext/ruby_prof/rp_aggregate_call_tree.h +13 -0
  11. data/ext/ruby_prof/rp_allocation.c +287 -0
  12. data/ext/ruby_prof/rp_allocation.h +31 -0
  13. data/ext/ruby_prof/rp_call_tree.c +367 -0
  14. data/ext/ruby_prof/rp_call_tree.h +43 -0
  15. data/ext/ruby_prof/rp_call_trees.c +288 -0
  16. data/ext/ruby_prof/rp_call_trees.h +28 -0
  17. data/ext/ruby_prof/rp_measure_allocations.c +47 -0
  18. data/ext/ruby_prof/rp_measure_memory.c +46 -0
  19. data/ext/ruby_prof/rp_measure_process_time.c +66 -0
  20. data/ext/ruby_prof/rp_measure_wall_time.c +64 -0
  21. data/ext/ruby_prof/rp_measurement.c +237 -0
  22. data/ext/ruby_prof/rp_measurement.h +50 -0
  23. data/ext/ruby_prof/rp_method.c +491 -0
  24. data/ext/ruby_prof/rp_method.h +62 -0
  25. data/ext/ruby_prof/rp_profile.c +915 -0
  26. data/ext/ruby_prof/rp_profile.h +35 -0
  27. data/ext/ruby_prof/rp_stack.c +212 -0
  28. data/ext/ruby_prof/rp_stack.h +53 -0
  29. data/ext/ruby_prof/rp_thread.c +362 -0
  30. data/ext/ruby_prof/rp_thread.h +39 -0
  31. data/ext/ruby_prof/ruby_prof.c +52 -0
  32. data/ext/ruby_prof/ruby_prof.h +26 -0
  33. data/ext/ruby_prof/vc/ruby_prof.sln +39 -0
  34. data/ext/ruby_prof/vc/ruby_prof.vcxproj +160 -0
  35. data/lib/3.1/ruby_prof.so +0 -0
  36. data/lib/ruby-prof/assets/call_stack_printer.html.erb +711 -0
  37. data/lib/ruby-prof/assets/call_stack_printer.png +0 -0
  38. data/lib/ruby-prof/assets/graph_printer.html.erb +355 -0
  39. data/lib/ruby-prof/call_tree.rb +57 -0
  40. data/lib/ruby-prof/call_tree_visitor.rb +36 -0
  41. data/lib/ruby-prof/compatibility.rb +99 -0
  42. data/lib/ruby-prof/exclude_common_methods.rb +198 -0
  43. data/lib/ruby-prof/measurement.rb +17 -0
  44. data/lib/ruby-prof/method_info.rb +78 -0
  45. data/lib/ruby-prof/printers/abstract_printer.rb +137 -0
  46. data/lib/ruby-prof/printers/call_info_printer.rb +53 -0
  47. data/lib/ruby-prof/printers/call_stack_printer.rb +180 -0
  48. data/lib/ruby-prof/printers/call_tree_printer.rb +147 -0
  49. data/lib/ruby-prof/printers/dot_printer.rb +132 -0
  50. data/lib/ruby-prof/printers/flat_printer.rb +53 -0
  51. data/lib/ruby-prof/printers/graph_html_printer.rb +63 -0
  52. data/lib/ruby-prof/printers/graph_printer.rb +113 -0
  53. data/lib/ruby-prof/printers/multi_printer.rb +127 -0
  54. data/lib/ruby-prof/profile.rb +37 -0
  55. data/lib/ruby-prof/rack.rb +95 -0
  56. data/lib/ruby-prof/task.rb +147 -0
  57. data/lib/ruby-prof/thread.rb +20 -0
  58. data/lib/ruby-prof/version.rb +3 -0
  59. data/lib/ruby-prof.rb +52 -0
  60. data/lib/unprof.rb +10 -0
  61. data/ruby-prof.gemspec +64 -0
  62. data/test/abstract_printer_test.rb +26 -0
  63. data/test/alias_test.rb +122 -0
  64. data/test/basic_test.rb +43 -0
  65. data/test/call_tree_visitor_test.rb +32 -0
  66. data/test/call_trees_test.rb +66 -0
  67. data/test/duplicate_names_test.rb +32 -0
  68. data/test/dynamic_method_test.rb +67 -0
  69. data/test/enumerable_test.rb +21 -0
  70. data/test/exceptions_test.rb +24 -0
  71. data/test/exclude_methods_test.rb +151 -0
  72. data/test/exclude_threads_test.rb +53 -0
  73. data/test/fiber_test.rb +129 -0
  74. data/test/gc_test.rb +100 -0
  75. data/test/inverse_call_tree_test.rb +175 -0
  76. data/test/line_number_test.rb +158 -0
  77. data/test/marshal_test.rb +145 -0
  78. data/test/measure_allocations.rb +26 -0
  79. data/test/measure_allocations_test.rb +333 -0
  80. data/test/measure_memory_test.rb +688 -0
  81. data/test/measure_process_time_test.rb +1614 -0
  82. data/test/measure_times.rb +56 -0
  83. data/test/measure_wall_time_test.rb +426 -0
  84. data/test/multi_printer_test.rb +71 -0
  85. data/test/no_method_class_test.rb +15 -0
  86. data/test/pause_resume_test.rb +175 -0
  87. data/test/prime.rb +54 -0
  88. data/test/prime_script.rb +6 -0
  89. data/test/printer_call_stack_test.rb +27 -0
  90. data/test/printer_call_tree_test.rb +30 -0
  91. data/test/printer_flat_test.rb +99 -0
  92. data/test/printer_graph_html_test.rb +59 -0
  93. data/test/printer_graph_test.rb +40 -0
  94. data/test/printers_test.rb +141 -0
  95. data/test/printing_recursive_graph_test.rb +81 -0
  96. data/test/profile_test.rb +16 -0
  97. data/test/rack_test.rb +93 -0
  98. data/test/recursive_test.rb +430 -0
  99. data/test/singleton_test.rb +38 -0
  100. data/test/stack_printer_test.rb +64 -0
  101. data/test/start_stop_test.rb +109 -0
  102. data/test/test_helper.rb +13 -0
  103. data/test/thread_test.rb +144 -0
  104. data/test/unique_call_path_test.rb +136 -0
  105. data/test/yarv_test.rb +60 -0
  106. metadata +187 -0
data/ext/ruby_prof/rp_call_tree.h @@ -0,0 +1,43 @@
+ /* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ #ifndef __RP_CALL_TREE_H__
+ #define __RP_CALL_TREE_H__
+
+ #include "ruby_prof.h"
+ #include "rp_measurement.h"
+ #include "rp_method.h"
+
+ extern VALUE cRpCallTree;
+
+ /* Callers and callee information for a method. */
+ typedef struct prof_call_tree_t
+ {
+     prof_method_t* method;
+     struct prof_call_tree_t* parent;
+     st_table* children; /* Call infos that this call info calls */
+     prof_measurement_t* measurement;
+     VALUE object;
+
+     int visits; /* Current visits on the stack */
+
+     unsigned int source_line;
+     VALUE source_file;
+ } prof_call_tree_t;
+
+ prof_call_tree_t* prof_call_tree_create(prof_method_t* method, prof_call_tree_t* parent, VALUE source_file, int source_line);
+ prof_call_tree_t* prof_call_tree_copy(prof_call_tree_t* other);
+ void prof_call_tree_merge(prof_call_tree_t* result, prof_call_tree_t* other);
+ void prof_call_tree_mark(void* data);
+ prof_call_tree_t* call_tree_table_lookup(st_table* table, st_data_t key);
+
+ void prof_call_tree_add_parent(prof_call_tree_t* self, prof_call_tree_t* parent);
+ void prof_call_tree_add_child(prof_call_tree_t* self, prof_call_tree_t* child);
+
+ uint32_t prof_call_figure_depth(prof_call_tree_t* call_tree_data);
+ prof_call_tree_t* prof_get_call_tree(VALUE self);
+ VALUE prof_call_tree_wrap(prof_call_tree_t* call_tree);
+ void prof_call_tree_free(prof_call_tree_t* call_tree);
+ void rp_init_call_tree();
+
+ #endif //__RP_CALL_TREE_H__
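
The prof_call_tree_t struct above backs the RubyProf::CallTree objects exposed to Ruby: each node records its method, parent, children and measurement. As a quick orientation, a minimal Ruby sketch of walking that tree from a profile result follows; it assumes ruby-prof 1.x's Ruby-side API (Profile.profile, Thread#call_tree, CallTree#target, #children, #total_time), which wraps this struct.

    require 'ruby-prof'

    # Print each call tree node with its method name and inclusive time.
    def dump_tree(call_tree, indent = 0)
      puts "#{'  ' * indent}#{call_tree.target.full_name} (#{call_tree.total_time.round(4)}s)"
      call_tree.children.each { |child| dump_tree(child, indent + 1) }
    end

    result = RubyProf::Profile.profile { 100.times { Object.new } }
    result.threads.each { |thread| dump_tree(thread.call_tree) }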
data/ext/ruby_prof/rp_call_trees.c @@ -0,0 +1,288 @@
+ /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ #include "rp_aggregate_call_tree.h"
+ #include "rp_call_trees.h"
+ #include "rp_measurement.h"
+
+ #define INITIAL_CALL_TREES_SIZE 2
+
+ VALUE cRpCallTrees;
+
+ /* ======= Call Infos ========*/
+ prof_call_trees_t* prof_get_call_trees(VALUE self)
+ {
+     /* Can't use Data_Get_Struct because that triggers the event hook
+        ending up in endless recursion. */
+     prof_call_trees_t* result = RTYPEDDATA_DATA(self);
+
+     if (!result)
+         rb_raise(rb_eRuntimeError, "This RubyProf::CallTrees instance has already been freed, likely because its profile has been freed.");
+
+     return result;
+ }
+
+ prof_call_trees_t* prof_call_trees_create()
+ {
+     prof_call_trees_t* result = ALLOC(prof_call_trees_t);
+     result->start = ALLOC_N(prof_call_tree_t*, INITIAL_CALL_TREES_SIZE);
+     result->end = result->start + INITIAL_CALL_TREES_SIZE;
+     result->ptr = result->start;
+     result->object = Qnil;
+     return result;
+ }
+
+ void prof_call_trees_mark(void* data)
+ {
+     if (!data) return;
+
+     prof_call_trees_t* call_trees = (prof_call_trees_t*)data;
+     prof_call_tree_t** call_tree;
+     for (call_tree = call_trees->start; call_tree < call_trees->ptr; call_tree++)
+     {
+         prof_call_tree_mark(*call_tree);
+     }
+ }
+
+ void prof_call_trees_free(prof_call_trees_t* call_trees)
+ {
+     /* Has this method object been accessed by Ruby? If
+        yes clean it up so to avoid a segmentation fault. */
+     if (call_trees->object != Qnil)
+     {
+         RTYPEDDATA(call_trees->object)->data = NULL;
+         call_trees->object = Qnil;
+     }
+
+     // Note we do not free our call_tree structures - since they have no parents they will free themselves
+     xfree(call_trees);
+ }
+
+ void prof_call_trees_ruby_gc_free(void* data)
+ {
+     if (data)
+     {
+         // This object gets freed by its owning method
+         prof_call_trees_t* call_trees = (prof_call_trees_t*)data;
+         call_trees->object = Qnil;
+     }
+ }
+
+ static int prof_call_trees_collect_aggregates(st_data_t key, st_data_t value, st_data_t data)
+ {
+     VALUE result = (VALUE)data;
+     prof_call_tree_t* call_tree_data = (prof_call_tree_t*)value;
+     VALUE aggregate_call_tree = prof_aggregate_call_tree_wrap(call_tree_data);
+     rb_ary_push(result, aggregate_call_tree);
+
+     return ST_CONTINUE;
+ }
+
+ static int prof_call_trees_collect_callees(st_data_t key, st_data_t value, st_data_t hash)
+ {
+     st_table* callers = (st_table*)hash;
+     prof_call_tree_t* call_tree_data = (prof_call_tree_t*)value;
+
+     prof_call_tree_t* aggregate_call_tree_data = NULL;
+
+     if (rb_st_lookup(callers, call_tree_data->method->key, (st_data_t*)&aggregate_call_tree_data))
+     {
+         prof_call_tree_merge(aggregate_call_tree_data, call_tree_data);
+     }
+     else
+     {
+         aggregate_call_tree_data = prof_call_tree_copy(call_tree_data);
+         rb_st_insert(callers, call_tree_data->method->key, (st_data_t)aggregate_call_tree_data);
+     }
+
+     return ST_CONTINUE;
+ }
+
+ size_t prof_call_trees_size(const void* data)
+ {
+     return sizeof(prof_call_trees_t);
+ }
+
+ static const rb_data_type_t call_trees_type =
+ {
+     .wrap_struct_name = "CallTrees",
+     .function =
+     {
+         .dmark = prof_call_trees_mark,
+         .dfree = prof_call_trees_ruby_gc_free,
+         .dsize = prof_call_trees_size,
+     },
+     .data = NULL,
+     .flags = RUBY_TYPED_FREE_IMMEDIATELY
+ };
+
+ VALUE prof_call_trees_wrap(prof_call_trees_t* call_trees)
+ {
+     if (call_trees->object == Qnil)
+     {
+         call_trees->object = TypedData_Wrap_Struct(cRpCallTrees, &call_trees_type, call_trees);
+     }
+     return call_trees->object;
+ }
+
+ void prof_add_call_tree(prof_call_trees_t* call_trees, prof_call_tree_t* call_tree)
+ {
+     if (call_trees->ptr == call_trees->end)
+     {
+         size_t len = call_trees->ptr - call_trees->start;
+         size_t new_capacity = (call_trees->end - call_trees->start) * 2;
+         REALLOC_N(call_trees->start, prof_call_tree_t*, new_capacity);
+         call_trees->ptr = call_trees->start + len;
+         call_trees->end = call_trees->start + new_capacity;
+     }
+     *call_trees->ptr = call_tree;
+     call_trees->ptr++;
+ }
+
+ /* ================ Call Infos =================*/
+ /* Document-class: RubyProf::CallTrees
+    The RubyProf::MethodInfo class stores profiling data for a method.
+    One instance of the RubyProf::MethodInfo class is created per method
+    called per thread. Thus, if a method is called in two different
+    threads, then there will be two RubyProf::MethodInfo objects
+    created. RubyProf::MethodInfo objects can be accessed via
+    the RubyProf::Profile object. */
+ VALUE prof_call_trees_allocate(VALUE klass)
+ {
+     prof_call_trees_t* call_trees_data = prof_call_trees_create();
+     call_trees_data->object = prof_call_trees_wrap(call_trees_data);
+     return call_trees_data->object;
+ }
+
+
+ /* call-seq:
+    min_depth -> Integer
+
+    Returns the minimum depth of this method in any call tree */
+ VALUE prof_call_trees_min_depth(VALUE self)
+ {
+     unsigned int depth = INT_MAX;
+
+     prof_call_trees_t* call_trees = prof_get_call_trees(self);
+     for (prof_call_tree_t** p_call_tree = call_trees->start; p_call_tree < call_trees->ptr; p_call_tree++)
+     {
+         unsigned int call_tree_depth = prof_call_figure_depth(*p_call_tree);
+         if (call_tree_depth < depth)
+             depth = call_tree_depth;
+     }
+
+     return UINT2NUM(depth);
+ }
+
+ /* call-seq:
+    call_trees -> array
+
+    Returns an array of the CallTree objects for this method. */
+ VALUE prof_call_trees_call_trees(VALUE self)
+ {
+     VALUE result = rb_ary_new();
+
+     prof_call_trees_t* call_trees = prof_get_call_trees(self);
+     for (prof_call_tree_t** p_call_tree = call_trees->start; p_call_tree < call_trees->ptr; p_call_tree++)
+     {
+         VALUE call_tree = prof_call_tree_wrap(*p_call_tree);
+         rb_ary_push(result, call_tree);
+     }
+     return result;
+ }
+
+ /* call-seq:
+    callers -> array
+
+    Returns an array of aggregated CallTree objects that called this method (ie, parents).*/
+ VALUE prof_call_trees_callers(VALUE self)
+ {
+     st_table* callers = rb_st_init_numtable();
+
+     prof_call_trees_t* call_trees = prof_get_call_trees(self);
+     for (prof_call_tree_t** p_call_tree = call_trees->start; p_call_tree < call_trees->ptr; p_call_tree++)
+     {
+         prof_call_tree_t* parent = (*p_call_tree)->parent;
+         if (parent == NULL)
+             continue;
+
+         prof_call_tree_t* aggregate_call_tree_data = NULL;
+
+         if (rb_st_lookup(callers, parent->method->key, (st_data_t*)&aggregate_call_tree_data))
+         {
+             prof_call_tree_merge(aggregate_call_tree_data, *p_call_tree);
+         }
+         else
+         {
+             aggregate_call_tree_data = prof_call_tree_copy(*p_call_tree);
+             rb_st_insert(callers, parent->method->key, (st_data_t)aggregate_call_tree_data);
+         }
+     }
+
+     VALUE result = rb_ary_new_capa((long)callers->num_entries);
+     rb_st_foreach(callers, prof_call_trees_collect_aggregates, result);
+     rb_st_free_table(callers);
+     return result;
+ }
+
+ /* call-seq:
+    callees -> array
+
+    Returns an array of aggregated CallTree objects that this method called (ie, children).*/
+ VALUE prof_call_trees_callees(VALUE self)
+ {
+     st_table* callees = rb_st_init_numtable();
+
+     prof_call_trees_t* call_trees = prof_get_call_trees(self);
+     for (prof_call_tree_t** call_tree = call_trees->start; call_tree < call_trees->ptr; call_tree++)
+     {
+         rb_st_foreach((*call_tree)->children, prof_call_trees_collect_callees, (st_data_t)callees);
+     }
+
+     VALUE result = rb_ary_new_capa((long)callees->num_entries);
+     rb_st_foreach(callees, prof_call_trees_collect_aggregates, result);
+     rb_st_free_table(callees);
+     return result;
+ }
+
+ /* :nodoc: */
+ VALUE prof_call_trees_dump(VALUE self)
+ {
+     VALUE result = rb_hash_new();
+     rb_hash_aset(result, ID2SYM(rb_intern("call_trees")), prof_call_trees_call_trees(self));
+
+     return result;
+ }
+
+ /* :nodoc: */
+ VALUE prof_call_trees_load(VALUE self, VALUE data)
+ {
+     prof_call_trees_t* call_trees_data = prof_get_call_trees(self);
+     call_trees_data->object = self;
+
+     VALUE call_trees = rb_hash_aref(data, ID2SYM(rb_intern("call_trees")));
+     for (int i = 0; i < rb_array_len(call_trees); i++)
+     {
+         VALUE call_tree = rb_ary_entry(call_trees, i);
+         prof_call_tree_t* call_tree_data = prof_get_call_tree(call_tree);
+         prof_add_call_tree(call_trees_data, call_tree_data);
+     }
+
+     return data;
+ }
+
+ void rp_init_call_trees()
+ {
+     cRpCallTrees = rb_define_class_under(mProf, "CallTrees", rb_cObject);
+     rb_undef_method(CLASS_OF(cRpCallTrees), "new");
+     rb_define_alloc_func(cRpCallTrees, prof_call_trees_allocate);
+
+     rb_define_method(cRpCallTrees, "min_depth", prof_call_trees_min_depth, 0);
+
+     rb_define_method(cRpCallTrees, "call_trees", prof_call_trees_call_trees, 0);
+     rb_define_method(cRpCallTrees, "callers", prof_call_trees_callers, 0);
+     rb_define_method(cRpCallTrees, "callees", prof_call_trees_callees, 0);
+
+     rb_define_method(cRpCallTrees, "_dump_data", prof_call_trees_dump, 0);
+     rb_define_method(cRpCallTrees, "_load_data", prof_call_trees_load, 1);
+ }
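
The methods registered in rp_init_call_trees above (min_depth, call_trees, callers and callees) are reached from Ruby through RubyProf::MethodInfo#call_trees. A hedged usage sketch follows, assuming the standard ruby-prof 1.x result API (Profile.profile, Thread#methods, MethodInfo#full_name):

    require 'ruby-prof'

    result = RubyProf::Profile.profile do
      1_000.times { [1, 2, 3].map(&:to_s) }
    end

    result.threads.each do |thread|
      thread.methods.each do |method_info|
        call_trees = method_info.call_trees   # RubyProf::CallTrees, defined above
        puts "#{method_info.full_name}: min depth #{call_trees.min_depth}, " \
             "#{call_trees.callers.size} caller(s), #{call_trees.callees.size} callee(s)"
      end
    end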
data/ext/ruby_prof/rp_call_trees.h @@ -0,0 +1,28 @@
+ /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ #ifndef __RP_CALL_TREES_H__
+ #define __RP_CALL_TREES_H__
+
+ #include "ruby_prof.h"
+ #include "rp_call_tree.h"
+
+ /* Array of call_tree objects */
+ typedef struct prof_call_trees_t
+ {
+     prof_call_tree_t** start;
+     prof_call_tree_t** end;
+     prof_call_tree_t** ptr;
+
+     VALUE object;
+ } prof_call_trees_t;
+
+
+ void rp_init_call_trees();
+ prof_call_trees_t* prof_call_trees_create();
+ void prof_call_trees_free(prof_call_trees_t* call_trees);
+ prof_call_trees_t* prof_get_call_trees(VALUE self);
+ void prof_add_call_tree(prof_call_trees_t* call_trees, prof_call_tree_t* call_tree);
+ VALUE prof_call_trees_wrap(prof_call_trees_t* call_trees);
+
+ #endif //__RP_CALL_TREES_H__
data/ext/ruby_prof/rp_measure_allocations.c @@ -0,0 +1,47 @@
+ /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ /* :nodoc: */
+
+ #include "rp_measurement.h"
+
+ static VALUE cMeasureAllocations;
+ VALUE total_allocated_objects_key;
+
+ static double measure_allocations(rb_trace_arg_t* trace_arg)
+ {
+     static double result = 0;
+
+     if (trace_arg)
+     {
+         // Only process creation of new objects
+         rb_event_flag_t event = rb_tracearg_event_flag(trace_arg);
+         if (event == RUBY_INTERNAL_EVENT_NEWOBJ) {
+             // Don't count allocations of internal IMemo objects
+             VALUE object = rb_tracearg_object(trace_arg);
+             if (BUILTIN_TYPE(object) != T_IMEMO)
+                 result++;
+         }
+     }
+     return result;
+ }
+
+ prof_measurer_t* prof_measurer_allocations(bool track_allocations)
+ {
+     prof_measurer_t* measure = ALLOC(prof_measurer_t);
+     measure->mode = MEASURE_ALLOCATIONS;
+     measure->measure = measure_allocations;
+     measure->multiplier = 1;
+     // Need to track allocations to get RUBY_INTERNAL_EVENT_NEWOBJ event
+     measure->track_allocations = track_allocations;
+
+     return measure;
+ }
+
+ void rp_init_measure_allocations()
+ {
+     total_allocated_objects_key = ID2SYM(rb_intern("total_allocated_objects"));
+     rb_define_const(mProf, "ALLOCATIONS", INT2NUM(MEASURE_ALLOCATIONS));
+
+     cMeasureAllocations = rb_define_class_under(mMeasure, "Allocations", rb_cObject);
+ }
data/ext/ruby_prof/rp_measure_memory.c @@ -0,0 +1,46 @@
+ /* Copyright (C) 2005-2013 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ /* :nodoc: */
+
+ #include "rp_measurement.h"
+
+ static VALUE cMeasureMemory;
+
+ static double measure_memory(rb_trace_arg_t* trace_arg)
+ {
+     static double result = 0;
+
+     if (trace_arg)
+     {
+         // Only process creation of new objects
+         rb_event_flag_t event = rb_tracearg_event_flag(trace_arg);
+         if (event == RUBY_INTERNAL_EVENT_NEWOBJ)
+         {
+             // Don't count allocations of internal IMemo objects
+             VALUE object = rb_tracearg_object(trace_arg);
+             if (BUILTIN_TYPE(object) != T_IMEMO)
+                 result += rb_obj_memsize_of(object);
+         }
+     }
+
+     return result;
+ }
+
+ prof_measurer_t* prof_measurer_memory(bool track_allocations)
+ {
+     prof_measurer_t* measure = ALLOC(prof_measurer_t);
+     measure->mode = MEASURE_MEMORY;
+     measure->measure = measure_memory;
+     measure->multiplier = 1;
+     // Need to track allocations to get RUBY_INTERNAL_EVENT_NEWOBJ event
+     measure->track_allocations = true;
+     return measure;
+ }
+
+ void rp_init_measure_memory()
+ {
+     rb_define_const(mProf, "MEMORY", INT2NUM(MEASURE_MEMORY));
+
+     cMeasureMemory = rb_define_class_under(mMeasure, "Memory", rb_cObject);
+ }
data/ext/ruby_prof/rp_measure_process_time.c @@ -0,0 +1,66 @@
+ /* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ #include "rp_measurement.h"
+ #include <time.h>
+
+ static VALUE cMeasureProcessTime;
+
+ static double measure_process_time(rb_trace_arg_t* trace_arg)
+ {
+ #if defined(_WIN32)
+     FILETIME createTime;
+     FILETIME exitTime;
+     FILETIME kernelTime;
+     FILETIME userTime;
+
+     ULARGE_INTEGER kernelTimeInt;
+     ULARGE_INTEGER userTimeInt;
+
+     GetProcessTimes(GetCurrentProcess(), &createTime, &exitTime, &kernelTime, &userTime);
+
+     kernelTimeInt.LowPart = kernelTime.dwLowDateTime;
+     kernelTimeInt.HighPart = kernelTime.dwHighDateTime;
+     userTimeInt.LowPart = userTime.dwLowDateTime;
+     userTimeInt.HighPart = userTime.dwHighDateTime;
+
+     return (double)(kernelTimeInt.QuadPart + userTimeInt.QuadPart);
+ #elif !defined(CLOCK_PROCESS_CPUTIME_ID)
+     #include <sys/resource.h>
+     struct rusage usage;
+     getrusage(RUSAGE_SELF, &usage);
+     return usage.ru_stime.tv_sec + usage.ru_utime.tv_sec + ((usage.ru_stime.tv_usec + usage.ru_utime.tv_usec) / 1000000.0);
+ #else
+     struct timespec clock;
+     clock_gettime(CLOCK_PROCESS_CPUTIME_ID, &clock);
+     return clock.tv_sec + (clock.tv_nsec / 1000000000.0);
+ #endif
+ }
+
+ static double multiplier_process_time(void)
+ {
+ #if defined(_WIN32)
+     // Times are in 100-nanosecond units, so the multiplier is 1e-7 rather than 1e-9
+     return 1.0 / 10000000.0;
+ #else
+     return 1.0;
+ #endif
+ }
+
+ prof_measurer_t* prof_measurer_process_time(bool track_allocations)
+ {
+     prof_measurer_t* measure = ALLOC(prof_measurer_t);
+     measure->mode = MEASURE_PROCESS_TIME;
+     measure->measure = measure_process_time;
+     measure->multiplier = multiplier_process_time();
+     measure->track_allocations = track_allocations;
+     return measure;
+ }
+
+ void rp_init_measure_process_time()
+ {
+     rb_define_const(mProf, "CLOCKS_PER_SEC", INT2NUM(CLOCKS_PER_SEC));
+     rb_define_const(mProf, "PROCESS_TIME", INT2NUM(MEASURE_PROCESS_TIME));
+
+     cMeasureProcessTime = rb_define_class_under(mMeasure, "ProcessTime", rb_cObject);
+ }
data/ext/ruby_prof/rp_measure_wall_time.c @@ -0,0 +1,64 @@
+ /* Copyright (C) 2005-2019 Shugo Maeda <shugo@ruby-lang.org> and Charlie Savage <cfis@savagexi.com>
+    Please see the LICENSE file for copyright and distribution information */
+
+ /* :nodoc: */
+ #include "rp_measurement.h"
+
+ #if defined(__APPLE__)
+ #include <mach/mach_time.h>
+ #elif !defined(_WIN32)
+ #include <time.h>
+ #endif
+
+ static VALUE cMeasureWallTime;
+
+ static double measure_wall_time(rb_trace_arg_t* trace_arg)
+ {
+ #if defined(_WIN32)
+     LARGE_INTEGER time;
+     QueryPerformanceCounter(&time);
+     return (double)time.QuadPart;
+ #elif defined(__APPLE__)
+     return mach_absolute_time();// * (uint64_t)mach_timebase.numer / (uint64_t)mach_timebase.denom;
+ #elif defined(__linux__)
+     struct timespec tv;
+     clock_gettime(CLOCK_MONOTONIC, &tv);
+     return tv.tv_sec + (tv.tv_nsec / 1000000000.0);
+ #else
+     struct timeval tv;
+     gettimeofday(&tv, NULL);
+     return tv.tv_sec + (tv.tv_usec / 1000000.0);
+ #endif
+ }
+
+ static double multiplier_wall_time(void)
+ {
+ #if defined(_WIN32)
+     LARGE_INTEGER frequency;
+     QueryPerformanceFrequency(&frequency);
+     return 1.0 / frequency.QuadPart;
+ #elif defined(__APPLE__)
+     mach_timebase_info_data_t mach_timebase;
+     mach_timebase_info(&mach_timebase);
+     return (uint64_t)mach_timebase.numer / (uint64_t)mach_timebase.denom / 1000000000.0;
+ #else
+     return 1.0;
+ #endif
+ }
+
+ prof_measurer_t* prof_measurer_wall_time(bool track_allocations)
+ {
+     prof_measurer_t* measure = ALLOC(prof_measurer_t);
+     measure->mode = MEASURE_WALL_TIME;
+     measure->measure = measure_wall_time;
+     measure->multiplier = multiplier_wall_time();
+     measure->track_allocations = track_allocations;
+     return measure;
+ }
+
+ void rp_init_measure_wall_time()
+ {
+     rb_define_const(mProf, "WALL_TIME", INT2NUM(MEASURE_WALL_TIME));
+
+     cMeasureWallTime = rb_define_class_under(mMeasure, "WallTime", rb_cObject);
+ }
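
The rp_measure_*.c files above define the measurement modes this release ships: ALLOCATIONS, MEMORY, PROCESS_TIME and WALL_TIME (the default). A minimal sketch of selecting one, assuming the RubyProf.measure_mode / RubyProf.profile convenience API provided by lib/ruby-prof/compatibility.rb:

    require 'ruby-prof'

    # Pick one of the modes registered by the measure files above.
    RubyProf.measure_mode = RubyProf::PROCESS_TIME   # or WALL_TIME, ALLOCATIONS, MEMORY

    result = RubyProf.profile do
      10_000.times { "ruby-prof".upcase }
    end

    RubyProf::FlatPrinter.new(result).print(STDOUT)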