ruby-prof 1.5.0-x64-mingw-ucrt → 1.6.1-x64-mingw-ucrt

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58)
  1. checksums.yaml +4 -4
  2. data/CHANGES +13 -0
  3. data/ext/ruby_prof/rp_allocation.c +136 -81
  4. data/ext/ruby_prof/rp_allocation.h +8 -6
  5. data/ext/ruby_prof/rp_call_tree.c +8 -1
  6. data/ext/ruby_prof/rp_measurement.c +10 -3
  7. data/ext/ruby_prof/rp_method.c +51 -76
  8. data/ext/ruby_prof/rp_profile.c +62 -77
  9. data/ext/ruby_prof/rp_profile.h +1 -0
  10. data/ext/ruby_prof/rp_thread.c +14 -4
  11. data/ext/ruby_prof/vc/ruby_prof.vcxproj +1 -1
  12. data/lib/3.1/ruby_prof.so +0 -0
  13. data/lib/3.2/ruby_prof.so +0 -0
  14. data/lib/ruby-prof/compatibility.rb +14 -0
  15. data/lib/ruby-prof/printers/abstract_printer.rb +1 -1
  16. data/lib/ruby-prof/printers/call_tree_printer.rb +1 -1
  17. data/lib/ruby-prof/printers/multi_printer.rb +17 -17
  18. data/lib/ruby-prof/profile.rb +5 -5
  19. data/lib/ruby-prof/rack.rb +31 -21
  20. data/lib/ruby-prof/version.rb +1 -1
  21. data/test/abstract_printer_test.rb +1 -0
  22. data/test/alias_test.rb +6 -11
  23. data/test/call_tree_visitor_test.rb +1 -6
  24. data/test/call_trees_test.rb +2 -2
  25. data/test/{basic_test.rb → compatibility_test.rb} +8 -2
  26. data/test/duplicate_names_test.rb +1 -1
  27. data/test/dynamic_method_test.rb +1 -6
  28. data/test/enumerable_test.rb +1 -1
  29. data/test/exceptions_test.rb +2 -2
  30. data/test/exclude_methods_test.rb +3 -8
  31. data/test/exclude_threads_test.rb +4 -9
  32. data/test/fiber_test.rb +4 -9
  33. data/test/gc_test.rb +2 -1
  34. data/test/inverse_call_tree_test.rb +33 -34
  35. data/test/line_number_test.rb +1 -1
  36. data/test/marshal_test.rb +3 -3
  37. data/test/measure_allocations_test.rb +8 -17
  38. data/test/measure_memory_test.rb +3 -12
  39. data/test/measure_process_time_test.rb +29 -34
  40. data/test/measure_wall_time_test.rb +176 -181
  41. data/test/multi_printer_test.rb +0 -5
  42. data/test/no_method_class_test.rb +1 -1
  43. data/test/pause_resume_test.rb +12 -16
  44. data/test/printer_call_stack_test.rb +2 -2
  45. data/test/printer_call_tree_test.rb +2 -2
  46. data/test/printer_flat_test.rb +1 -1
  47. data/test/printer_graph_html_test.rb +2 -2
  48. data/test/printer_graph_test.rb +2 -2
  49. data/test/printers_test.rb +14 -20
  50. data/test/printing_recursive_graph_test.rb +2 -2
  51. data/test/recursive_test.rb +2 -7
  52. data/test/singleton_test.rb +1 -1
  53. data/test/stack_printer_test.rb +5 -8
  54. data/test/start_stop_test.rb +11 -14
  55. data/test/thread_test.rb +13 -15
  56. data/test/unique_call_path_test.rb +4 -4
  57. data/test/yarv_test.rb +3 -3
  58. metadata +4 -4
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: dc83e5cdca77c3e07af7ad99f69acfbf8dde2013c435855d996925db9dcd586c
4
- data.tar.gz: 9c14d146c1194db666cec9f9ab05e4a798bd0a6ed5c535d850be581ddf8734fa
3
+ metadata.gz: b366b0cde0e477d0b06b95b8bcb7f2d0c8747216b9cc8fcdf2c202e89a3106d6
4
+ data.tar.gz: 21239104377ef6a1479c4110dcd4cd2e2b086d75cd577a3531102799f19b8061
5
5
  SHA512:
6
- metadata.gz: 56e6c6a054fed0bdbb08a41ead10898495efb4201c19f2bd3572f3b0ac939e1770c3c4f1bb82da4399ba27f99ed135447e004263d57af86a0888a6a6a349f1fc
7
- data.tar.gz: 251eba317457ec6ea583b25656c062f92e8f55a35c7279f39a1f9987234fd9b481fbfd6ab72d71af34541ca49aad2b0feacefbc4d44ca75a4486ec003cb2fa19
6
+ metadata.gz: 6aa08fee0a9d16b9276867db6f7465286cae2d118738ff20e78bca8eae764bf9a6657bf93678b0f3430f2998c11117022e6bd6d732d29021bc4fb3b5e6f34615
7
+ data.tar.gz: b34eb2eb7e9bfb6567292ead8d625a3fd71533d3d2ed87be0798ab9e4e5def42a86768b02605f64048b25fbc90c2b736c5b1c4add494900c8250bfcf480e728d
data/CHANGES CHANGED
@@ -1,3 +1,16 @@
1
+ 1.6.1 (2023-02-21)
2
+ =====================
3
+ * Fix loading C extension for MacOS (Charlie Savage)
4
+
5
+ 1.6.0 (2023-02-20)
6
+ =====================
7
+ * Add support for Ruby's compacting garbage collector (Charlie Savage)
8
+ * Add rbs signature files (Charlie Savage)
9
+ * Update rack adapter used for profiling Rails to include latest ruby-prof features (Charlie Savage)
10
+ * Add warnings for deprecated methods (Charlie Savage)
11
+ * Update tests to not use deprecated methods (Charlie Savage)
12
+ * Improve tests on OSX (Charlie Savage)
13
+
1
14
  1.5.0 (2023-02-06)
2
15
  =====================
3
16
  * Add new Profile#merge! method that merges results for threads/fibers that share the same root method (Charlie Savage)
@@ -2,32 +2,11 @@
2
2
  Please see the LICENSE file for copyright and distribution information */
3
3
 
4
4
  #include "rp_allocation.h"
5
+ #include "rp_method.h"
5
6
 
6
7
  VALUE cRpAllocation;
7
8
 
8
- prof_allocation_t* allocations_table_lookup(st_table* table, st_data_t key)
9
- {
10
- prof_allocation_t* result = NULL;
11
- st_data_t value;
12
- if (rb_st_lookup(table, key, &value))
13
- {
14
- result = (prof_allocation_t*)value;
15
- }
16
-
17
- return result;
18
- }
19
-
20
- void allocations_table_insert(st_table* table, st_data_t key, prof_allocation_t* allocation)
21
- {
22
- rb_st_insert(table, (st_data_t)key, (st_data_t)allocation);
23
- }
24
-
25
- st_data_t allocations_key(VALUE klass, int source_line)
26
- {
27
- return (klass << 4) + source_line;
28
- }
29
-
30
- /* ====== prof_allocation_t ====== */
9
+ // ------ prof_allocation_t ------
31
10
  prof_allocation_t* prof_allocation_create(void)
32
11
  {
33
12
  prof_allocation_t* result = ALLOC(prof_allocation_t);
@@ -43,48 +22,17 @@ prof_allocation_t* prof_allocation_create(void)
43
22
  return result;
44
23
  }
45
24
 
46
- prof_allocation_t* prof_get_allocation(VALUE self)
25
+ prof_allocation_t* prof_allocation_get(VALUE self)
47
26
  {
48
27
  /* Can't use Data_Get_Struct because that triggers the event hook
49
28
  ending up in endless recursion. */
50
29
  prof_allocation_t* result = RTYPEDDATA_DATA(self);
51
-
52
30
  if (!result)
53
31
  rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
54
32
 
55
33
  return result;
56
34
  }
57
35
 
58
- prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg)
59
- {
60
- VALUE object = rb_tracearg_object(trace_arg);
61
- if (BUILTIN_TYPE(object) == T_IMEMO)
62
- return NULL;
63
-
64
- VALUE klass = rb_obj_class(object);
65
-
66
- int source_line = FIX2INT(rb_tracearg_lineno(trace_arg));
67
- st_data_t key = allocations_key(klass, source_line);
68
-
69
- prof_allocation_t* allocation = allocations_table_lookup(method->allocations_table, key);
70
- if (!allocation)
71
- {
72
- allocation = prof_allocation_create();
73
- allocation->source_line = source_line;
74
- allocation->source_file = rb_tracearg_path(trace_arg);
75
- allocation->klass_flags = 0;
76
- allocation->klass = resolve_klass(klass, &allocation->klass_flags);
77
-
78
- allocation->key = key;
79
- allocations_table_insert(method->allocations_table, key, allocation);
80
- }
81
-
82
- allocation->count++;
83
- allocation->memory += rb_obj_memsize_of(object);
84
-
85
- return allocation;
86
- }
87
-
88
36
  static void prof_allocation_ruby_gc_free(void* data)
89
37
  {
90
38
  if (data)
@@ -118,30 +66,39 @@ void prof_allocation_mark(void* data)
118
66
 
119
67
  prof_allocation_t* allocation = (prof_allocation_t*)data;
120
68
  if (allocation->object != Qnil)
121
- rb_gc_mark(allocation->object);
69
+ rb_gc_mark_movable(allocation->object);
122
70
 
123
71
  if (allocation->klass != Qnil)
124
- rb_gc_mark(allocation->klass);
72
+ rb_gc_mark_movable(allocation->klass);
125
73
 
126
74
  if (allocation->klass_name != Qnil)
127
- rb_gc_mark(allocation->klass_name);
75
+ rb_gc_mark_movable(allocation->klass_name);
128
76
 
129
77
  if (allocation->source_file != Qnil)
130
78
  rb_gc_mark(allocation->source_file);
131
79
  }
132
80
 
133
- static const rb_data_type_t allocation_type =
81
+ void prof_allocation_compact(void* data)
134
82
  {
135
- .wrap_struct_name = "Allocation",
136
- .function =
137
- {
138
- .dmark = prof_allocation_mark,
139
- .dfree = prof_allocation_ruby_gc_free,
140
- .dsize = prof_allocation_size,
141
- },
142
- .data = NULL,
143
- .flags = RUBY_TYPED_FREE_IMMEDIATELY
144
- };
83
+ prof_allocation_t* allocation = (prof_allocation_t*)data;
84
+ allocation->object = rb_gc_location(allocation->object);
85
+ allocation->klass = rb_gc_location(allocation->klass);
86
+ allocation->klass_name = rb_gc_location(allocation->klass_name);
87
+ }
88
+
89
+ static const rb_data_type_t allocation_type =
90
+ {
91
+ .wrap_struct_name = "Allocation",
92
+ .function =
93
+ {
94
+ .dmark = prof_allocation_mark,
95
+ .dfree = prof_allocation_ruby_gc_free,
96
+ .dsize = prof_allocation_size,
97
+ .dcompact = prof_allocation_compact
98
+ },
99
+ .data = NULL,
100
+ .flags = RUBY_TYPED_FREE_IMMEDIATELY
101
+ };
145
102
 
146
103
  VALUE prof_allocation_wrap(prof_allocation_t* allocation)
147
104
  {
@@ -152,24 +109,122 @@ VALUE prof_allocation_wrap(prof_allocation_t* allocation)
152
109
  return allocation->object;
153
110
  }
154
111
 
155
- static VALUE prof_allocation_allocate(VALUE klass)
112
+ /* ====== Allocation Table ====== */
113
+ st_table* prof_allocations_create()
156
114
  {
157
- prof_allocation_t* allocation = prof_allocation_create();
158
- allocation->object = prof_allocation_wrap(allocation);
159
- return allocation->object;
115
+ return rb_st_init_numtable();
160
116
  }
161
117
 
162
- prof_allocation_t* prof_allocation_get(VALUE self)
118
+ static int allocations_table_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
163
119
  {
164
- /* Can't use Data_Get_Struct because that triggers the event hook
165
- ending up in endless recursion. */
166
- prof_allocation_t* result = RTYPEDDATA_DATA(self);
167
- if (!result)
168
- rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
120
+ prof_allocation_free((prof_allocation_t*)value);
121
+ return ST_CONTINUE;
122
+ }
123
+
124
+ st_data_t allocations_key(VALUE klass, int source_line)
125
+ {
126
+ return (klass << 4) + source_line;
127
+ }
128
+
129
+ static int prof_allocations_collect(st_data_t key, st_data_t value, st_data_t result)
130
+ {
131
+ prof_allocation_t* allocation = (prof_allocation_t*)value;
132
+ VALUE arr = (VALUE)result;
133
+ rb_ary_push(arr, prof_allocation_wrap(allocation));
134
+ return ST_CONTINUE;
135
+ }
136
+
137
+ static int prof_allocations_mark_each(st_data_t key, st_data_t value, st_data_t data)
138
+ {
139
+ prof_allocation_t* allocation = (prof_allocation_t*)value;
140
+ prof_allocation_mark(allocation);
141
+ return ST_CONTINUE;
142
+ }
143
+
144
+ void prof_allocations_mark(st_table* allocations_table)
145
+ {
146
+ rb_st_foreach(allocations_table, prof_allocations_mark_each, 0);
147
+ }
148
+
149
+ void prof_allocations_free(st_table* table)
150
+ {
151
+ rb_st_foreach(table, allocations_table_free_iterator, 0);
152
+ rb_st_free_table(table);
153
+ }
154
+
155
+ prof_allocation_t* allocations_table_lookup(st_table* table, st_data_t key)
156
+ {
157
+ prof_allocation_t* result = NULL;
158
+ st_data_t value;
159
+ if (rb_st_lookup(table, key, &value))
160
+ {
161
+ result = (prof_allocation_t*)value;
162
+ }
169
163
 
170
164
  return result;
171
165
  }
172
166
 
167
+ void allocations_table_insert(st_table* table, st_data_t key, prof_allocation_t* allocation)
168
+ {
169
+ rb_st_insert(table, (st_data_t)key, (st_data_t)allocation);
170
+ }
171
+
172
+ prof_allocation_t* prof_allocate_increment(st_table* allocations_table, rb_trace_arg_t* trace_arg)
173
+ {
174
+ VALUE object = rb_tracearg_object(trace_arg);
175
+ if (BUILTIN_TYPE(object) == T_IMEMO)
176
+ return NULL;
177
+
178
+ VALUE klass = rb_obj_class(object);
179
+
180
+ int source_line = FIX2INT(rb_tracearg_lineno(trace_arg));
181
+ st_data_t key = allocations_key(klass, source_line);
182
+
183
+ prof_allocation_t* allocation = allocations_table_lookup(allocations_table, key);
184
+ if (!allocation)
185
+ {
186
+ allocation = prof_allocation_create();
187
+ allocation->source_line = source_line;
188
+ allocation->source_file = rb_tracearg_path(trace_arg);
189
+ allocation->klass_flags = 0;
190
+ allocation->klass = resolve_klass(klass, &allocation->klass_flags);
191
+
192
+ allocation->key = key;
193
+ allocations_table_insert(allocations_table, key, allocation);
194
+ }
195
+
196
+ allocation->count++;
197
+ allocation->memory += rb_obj_memsize_of(object);
198
+
199
+ return allocation;
200
+ }
201
+
202
+ // Returns an array of allocations
203
+ VALUE prof_allocations_wrap(st_table* allocations_table)
204
+ {
205
+ VALUE result = rb_ary_new();
206
+ rb_st_foreach(allocations_table, prof_allocations_collect, result);
207
+ return result;
208
+ }
209
+
210
+ void prof_allocations_unwrap(st_table* allocations_table, VALUE allocations)
211
+ {
212
+ for (int i = 0; i < rb_array_len(allocations); i++)
213
+ {
214
+ VALUE allocation = rb_ary_entry(allocations, i);
215
+ prof_allocation_t* allocation_data = prof_allocation_get(allocation);
216
+ rb_st_insert(allocations_table, allocation_data->key, (st_data_t)allocation_data);
217
+ }
218
+ }
219
+
220
+ /* ====== prof_allocation_t ====== */
221
+ static VALUE prof_allocation_allocate(VALUE klass)
222
+ {
223
+ prof_allocation_t* allocation = prof_allocation_create();
224
+ allocation->object = prof_allocation_wrap(allocation);
225
+ return allocation->object;
226
+ }
227
+
173
228
  /* call-seq:
174
229
  klass -> Class
175
230
 
@@ -238,7 +293,7 @@ static VALUE prof_allocation_memory(VALUE self)
238
293
  /* :nodoc: */
239
294
  static VALUE prof_allocation_dump(VALUE self)
240
295
  {
241
- prof_allocation_t* allocation = prof_get_allocation(self);
296
+ prof_allocation_t* allocation = prof_allocation_get(self);
242
297
 
243
298
  VALUE result = rb_hash_new();
244
299
 
@@ -256,7 +311,7 @@ static VALUE prof_allocation_dump(VALUE self)
256
311
  /* :nodoc: */
257
312
  static VALUE prof_allocation_load(VALUE self, VALUE data)
258
313
  {
259
- prof_allocation_t* allocation = prof_get_allocation(self);
314
+ prof_allocation_t* allocation = prof_allocation_get(self);
260
315
  allocation->object = self;
261
316
 
262
317
  allocation->key = RB_NUM2ULL(rb_hash_aref(data, ID2SYM(rb_intern("key"))));
@@ -5,7 +5,6 @@
5
5
  #define _RP_ALLOCATION_
6
6
 
7
7
  #include "ruby_prof.h"
8
- #include "rp_method.h"
9
8
 
10
9
  typedef struct prof_allocation_t
11
10
  {
@@ -20,12 +19,15 @@ typedef struct prof_allocation_t
20
19
  VALUE object; /* Cache to wrapped object */
21
20
  } prof_allocation_t;
22
21
 
22
+ // Allocation (prof_allocation_t*)
23
23
  void rp_init_allocation(void);
24
- void prof_allocation_free(prof_allocation_t* allocation);
25
- void prof_allocation_mark(void* data);
26
- VALUE prof_allocation_wrap(prof_allocation_t* allocation);
27
- prof_allocation_t* prof_allocation_get(VALUE self);
28
- prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t* trace_arg);
24
+ prof_allocation_t* prof_allocate_increment(st_table* allocations_table, rb_trace_arg_t* trace_arg);
29
25
 
26
+ // Allocations (st_table*)
27
+ st_table* prof_allocations_create(void);
28
+ VALUE prof_allocations_wrap(st_table* allocations_table);
29
+ void prof_allocations_unwrap(st_table* allocations_table, VALUE allocations);
30
+ void prof_allocations_mark(st_table* allocations_table);
31
+ void prof_allocations_free(st_table* table);
30
32
 
31
33
  #endif //_RP_ALLOCATION_
@@ -54,7 +54,7 @@ void prof_call_tree_mark(void* data)
54
54
  prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
55
55
 
56
56
  if (call_tree->object != Qnil)
57
- rb_gc_mark(call_tree->object);
57
+ rb_gc_mark_movable(call_tree->object);
58
58
 
59
59
  if (call_tree->source_file != Qnil)
60
60
  rb_gc_mark(call_tree->source_file);
@@ -68,6 +68,12 @@ void prof_call_tree_mark(void* data)
68
68
  rb_st_foreach(call_tree->children, prof_call_tree_mark_children, 0);
69
69
  }
70
70
 
71
+ void prof_call_tree_compact(void* data)
72
+ {
73
+ prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
74
+ call_tree->object = rb_gc_location(call_tree->object);
75
+ }
76
+
71
77
  static int prof_call_tree_free_children(st_data_t key, st_data_t value, st_data_t data)
72
78
  {
73
79
  prof_call_tree_t* call_tree = (prof_call_tree_t*)value;
@@ -130,6 +136,7 @@ static const rb_data_type_t call_tree_type =
130
136
  .dmark = prof_call_tree_mark,
131
137
  .dfree = prof_call_tree_ruby_gc_free,
132
138
  .dsize = prof_call_tree_size,
139
+ .dcompact = prof_call_tree_compact
133
140
  },
134
141
  .data = NULL,
135
142
  .flags = RUBY_TYPED_FREE_IMMEDIATELY
@@ -102,10 +102,16 @@ void prof_measurement_mark(void* data)
102
102
  {
103
103
  if (!data) return;
104
104
 
105
- prof_measurement_t* measurement_data = (prof_measurement_t*)data;
105
+ prof_measurement_t* measurement = (prof_measurement_t*)data;
106
106
 
107
- if (measurement_data->object != Qnil)
108
- rb_gc_mark(measurement_data->object);
107
+ if (measurement->object != Qnil)
108
+ rb_gc_mark_movable(measurement->object);
109
+ }
110
+
111
+ void prof_measurement_compact(void* data)
112
+ {
113
+ prof_measurement_t* measurement = (prof_measurement_t*)data;
114
+ measurement->object = rb_gc_location(measurement->object);
109
115
  }
110
116
 
111
117
  void prof_measurement_free(prof_measurement_t* measurement)
@@ -155,6 +161,7 @@ static const rb_data_type_t measurement_type =
155
161
  .dmark = prof_measurement_mark,
156
162
  .dfree = prof_measurement_ruby_gc_free,
157
163
  .dsize = prof_measurement_size,
164
+ .dcompact = prof_measurement_compact
158
165
  },
159
166
  .data = NULL,
160
167
  .flags = RUBY_TYPED_FREE_IMMEDIATELY
@@ -22,31 +22,38 @@ VALUE resolve_klass(VALUE klass, unsigned int* klass_flags)
22
22
  figure out what it is attached to.*/
23
23
  VALUE attached = rb_iv_get(klass, "__attached__");
24
24
 
25
- /* Is this a singleton class acting as a metaclass? */
26
- if (BUILTIN_TYPE(attached) == T_CLASS)
25
+ switch (BUILTIN_TYPE(attached))
27
26
  {
28
- *klass_flags |= kClassSingleton;
29
- result = attached;
30
- }
31
- /* Is this for singleton methods on a module? */
32
- else if (BUILTIN_TYPE(attached) == T_MODULE)
33
- {
34
- *klass_flags |= kModuleSingleton;
35
- result = attached;
36
- }
37
- /* Is this for singleton methods on an object? */
38
- else if (BUILTIN_TYPE(attached) == T_OBJECT)
39
- {
40
- *klass_flags |= kObjectSingleton;
41
- result = rb_class_superclass(klass);
42
- }
43
- /* Ok, this could be other things like an array put onto
44
- a singleton object (yeah, it happens, see the singleton
45
- objects test case). */
46
- else
47
- {
48
- *klass_flags |= kOtherSingleton;
49
- result = klass;
27
+ /* Is this a singleton class acting as a metaclass? */
28
+ case T_CLASS:
29
+ {
30
+ *klass_flags |= kClassSingleton;
31
+ result = attached;
32
+ break;
33
+ }
34
+ /* Is this for singleton methods on a module? */
35
+ case T_MODULE:
36
+ {
37
+ *klass_flags |= kModuleSingleton;
38
+ result = attached;
39
+ break;
40
+ }
41
+ /* Is this for singleton methods on an object? */
42
+ case T_OBJECT:
43
+ {
44
+ *klass_flags |= kObjectSingleton;
45
+ result = rb_class_superclass(klass);
46
+ break;
47
+ }
48
+ /* Ok, this could be other things like an array put onto
49
+ a singleton object (yeah, it happens, see the singleton
50
+ objects test case). */
51
+ default:
52
+ {
53
+ *klass_flags |= kOtherSingleton;
54
+ result = klass;
55
+ break;
56
+ }
50
57
  }
51
58
  }
52
59
  /* Is this an include for a module? If so get the actual
@@ -56,7 +63,7 @@ VALUE resolve_klass(VALUE klass, unsigned int* klass_flags)
56
63
  {
57
64
  unsigned int dummy;
58
65
  *klass_flags |= kModuleIncludee;
59
- result = resolve_klass(RBASIC(klass)->klass, &dummy);
66
+ result = resolve_klass(RBASIC_CLASS(klass), &dummy);
60
67
  }
61
68
  return result;
62
69
  }
@@ -94,7 +101,7 @@ st_data_t method_key(VALUE klass, VALUE msym)
94
101
  }
95
102
  else if (BUILTIN_TYPE(klass) == T_ICLASS)
96
103
  {
97
- resolved_klass = RBASIC(klass)->klass;
104
+ resolved_klass = RBASIC_CLASS(klass);
98
105
  }
99
106
 
100
107
  st_data_t hash = rb_hash_start(0);
@@ -105,39 +112,6 @@ st_data_t method_key(VALUE klass, VALUE msym)
105
112
  return hash;
106
113
  }
107
114
 
108
- /* ====== Allocation Table ====== */
109
- st_table* allocations_table_create()
110
- {
111
- return rb_st_init_numtable();
112
- }
113
-
114
- static int allocations_table_free_iterator(st_data_t key, st_data_t value, st_data_t dummy)
115
- {
116
- prof_allocation_free((prof_allocation_t*)value);
117
- return ST_CONTINUE;
118
- }
119
-
120
- static int prof_method_collect_allocations(st_data_t key, st_data_t value, st_data_t result)
121
- {
122
- prof_allocation_t* allocation = (prof_allocation_t*)value;
123
- VALUE arr = (VALUE)result;
124
- rb_ary_push(arr, prof_allocation_wrap(allocation));
125
- return ST_CONTINUE;
126
- }
127
-
128
- static int prof_method_mark_allocations(st_data_t key, st_data_t value, st_data_t data)
129
- {
130
- prof_allocation_t* allocation = (prof_allocation_t*)value;
131
- prof_allocation_mark(allocation);
132
- return ST_CONTINUE;
133
- }
134
-
135
- void allocations_table_free(st_table* table)
136
- {
137
- rb_st_foreach(table, allocations_table_free_iterator, 0);
138
- rb_st_free_table(table);
139
- }
140
-
141
115
  /* ================ prof_method_t =================*/
142
116
  prof_method_t* prof_get_method(VALUE self)
143
117
  {
@@ -167,7 +141,7 @@ prof_method_t* prof_method_create(VALUE profile, VALUE klass, VALUE msym, VALUE
167
141
  result->measurement = prof_measurement_create();
168
142
 
169
143
  result->call_trees = prof_call_trees_create();
170
- result->allocations_table = allocations_table_create();
144
+ result->allocations_table = prof_allocations_create();
171
145
 
172
146
  result->visits = 0;
173
147
  result->recursive = false;
@@ -205,7 +179,7 @@ static void prof_method_free(prof_method_t* method)
205
179
  method->object = Qnil;
206
180
  }
207
181
 
208
- allocations_table_free(method->allocations_table);
182
+ prof_allocations_free(method->allocations_table);
209
183
  prof_call_trees_free(method->call_trees);
210
184
  prof_measurement_free(method->measurement);
211
185
  xfree(method);
@@ -223,21 +197,29 @@ void prof_method_mark(void* data)
223
197
  prof_method_t* method = (prof_method_t*)data;
224
198
 
225
199
  if (method->profile != Qnil)
226
- rb_gc_mark(method->profile);
200
+ rb_gc_mark_movable(method->profile);
227
201
 
228
202
  if (method->object != Qnil)
229
- rb_gc_mark(method->object);
203
+ rb_gc_mark_movable(method->object);
230
204
 
231
- rb_gc_mark(method->klass_name);
232
- rb_gc_mark(method->method_name);
205
+ rb_gc_mark_movable(method->klass_name);
206
+ rb_gc_mark_movable(method->method_name);
233
207
  rb_gc_mark(method->source_file);
234
208
 
235
209
  if (method->klass != Qnil)
236
210
  rb_gc_mark(method->klass);
237
211
 
238
212
  prof_measurement_mark(method->measurement);
213
+ prof_allocations_mark(method->allocations_table);
214
+ }
239
215
 
240
- rb_st_foreach(method->allocations_table, prof_method_mark_allocations, 0);
216
+ void prof_method_compact(void* data)
217
+ {
218
+ prof_method_t* method = (prof_method_t*)data;
219
+ method->object = rb_gc_location(method->object);
220
+ method->profile = rb_gc_location(method->profile);
221
+ method->klass_name = rb_gc_location(method->klass_name);
222
+ method->method_name = rb_gc_location(method->method_name);
241
223
  }
242
224
 
243
225
  static VALUE prof_method_allocate(VALUE klass)
@@ -255,6 +237,7 @@ static const rb_data_type_t method_info_type =
255
237
  .dmark = prof_method_mark,
256
238
  .dfree = prof_method_ruby_gc_free,
257
239
  .dsize = prof_method_size,
240
+ .dcompact = prof_method_compact
258
241
  },
259
242
  .data = NULL,
260
243
  .flags = RUBY_TYPED_FREE_IMMEDIATELY
@@ -360,9 +343,7 @@ Returns an array of allocation information.*/
360
343
  static VALUE prof_method_allocations(VALUE self)
361
344
  {
362
345
  prof_method_t* method = prof_get_method(self);
363
- VALUE result = rb_ary_new();
364
- rb_st_foreach(method->allocations_table, prof_method_collect_allocations, result);
365
- return result;
346
+ return prof_allocations_wrap(method->allocations_table);
366
347
  }
367
348
 
368
349
  /* call-seq:
@@ -499,13 +480,7 @@ static VALUE prof_method_load(VALUE self, VALUE data)
499
480
  method_data->measurement = prof_get_measurement(measurement);
500
481
 
501
482
  VALUE allocations = rb_hash_aref(data, ID2SYM(rb_intern("allocations")));
502
- for (int i = 0; i < rb_array_len(allocations); i++)
503
- {
504
- VALUE allocation = rb_ary_entry(allocations, i);
505
- prof_allocation_t* allocation_data = prof_allocation_get(allocation);
506
-
507
- rb_st_insert(method_data->allocations_table, allocation_data->key, (st_data_t)allocation_data);
508
- }
483
+ prof_allocations_unwrap(method_data->allocations_table, allocations);
509
484
  return data;
510
485
  }
511
486