ruby-prof 1.2.0 → 1.3.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 0006623bb548f82d74fa8bdd6cfa8e257ec5deea1d4f37f53dab8d9c780427d9
-  data.tar.gz: b30fd0deaf1ac3c3acf8519e3280aac4fc2b48a402658905ffe2f68c14edb80a
+  metadata.gz: 522834d46befbdc0f999e78d9bff9f2f98883c6bf5a15fc8d56444bf34778a72
+  data.tar.gz: b4bf7cc5ec23482ccbcb103b6c7ab87c8614da0076d6a43f4848d6cd8aee3178
 SHA512:
-  metadata.gz: 7869ef3006a8e36883b5fb6f592eacf7d0243ef1959c0f1b8f67f378004bb880199ffa0914d5df4807ae35cd0c7843de1f2379a0602c798f83b4b0e25b6d764d
-  data.tar.gz: d804cfc1f3c77f864583b5afcbf608bce6d9af098343a9212cb1a7e19488f2fae7177d225a55887249c85ee15efc2d6b1c239b5b847b9fd7deca298aa3a368a6
+  metadata.gz: ef3c25b85260c8c3878c3dbfb5d80469731f1070ecb3d5d8c474dc8d5c149578757fcfec91af508742cdd1b1da7500a4cf7dc2670b23f3261d5689f4fcd5f27c
+  data.tar.gz: bc891673567736bef06b12eb72514425261a678ad9d33ad709835e1e1cffd265c4657ba436a2e28486a45fa35fd678117ac2a2882c564fbfb334eb1edcec3918
data/CHANGES CHANGED
@@ -1,3 +1,10 @@
+1.3.0 (2020-02-22)
+=====================
+* Update C code to use the newer RTypedData API instead of the older RData API.
+* Update C code to use rb_cObject instead of the older, deprecated rb_cData.
+* Update memory management - CallInfos now keep their owning Profile instances alive. Fixes crashes that
+  could happen in rare instances.
+
 1.2.0 (2020-01-23)
 =====================
 * Fix call stack printer broken in version 1.0.0 (Charlie Savage)
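
The headline change in 1.3.0, visible in every C hunk below, is the migration from Ruby's old untyped Data API (Data_Wrap_Struct / RData) to the typed rb_data_type_t / TypedData API. As a minimal sketch of the migration pattern, using a hypothetical `point_t` struct rather than ruby-prof's own types:

```c
#include <ruby.h>

typedef struct { double x, y; } point_t;

static void point_free(void* data) { xfree(data); }
static size_t point_size(const void* data) { return sizeof(point_t); }

/* Old, untyped API (what 1.2.0 used): the GC callbacks travel with each
 * individual object and the wrapped pointer carries no type information.
 *
 *     obj = Data_Wrap_Struct(klass, NULL, point_free, point);
 */

/* New, typed API (what 1.3.0 switches to): describe the struct once... */
static const rb_data_type_t point_type = {
    .wrap_struct_name = "Point",   /* shows up in ObjectSpace dumps */
    .function = {
        .dmark = NULL,             /* this struct holds no VALUEs to mark */
        .dfree = point_free,
        .dsize = point_size,       /* lets ObjectSpace.memsize_of report it */
    },
    .data = NULL,
    .flags = RUBY_TYPED_FREE_IMMEDIATELY,
};

/* ...then wrap through the shared descriptor. */
static VALUE point_wrap(VALUE klass, point_t* point)
{
    return TypedData_Wrap_Struct(klass, &point_type, point);
}
```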
ext/ruby_prof/rp_aggregate_call_tree.c CHANGED
@@ -24,11 +24,29 @@ static void prof_aggregate_call_tree_ruby_gc_free(void* data)
     prof_call_tree_free(call_tree);
 }
 
+size_t prof_aggregate_call_tree_size(const void* data)
+{
+    return sizeof(prof_call_tree_t);
+}
+
+static const rb_data_type_t aggregate_call_tree_type =
+{
+    .wrap_struct_name = "Aggregate_CallTree",
+    .function =
+    {
+        .dmark = prof_aggregate_call_tree_mark,
+        .dfree = prof_aggregate_call_tree_ruby_gc_free,
+        .dsize = prof_aggregate_call_tree_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
 VALUE prof_aggregate_call_tree_wrap(prof_call_tree_t* call_tree)
 {
     if (call_tree->object == Qnil)
     {
-        call_tree->object = Data_Wrap_Struct(cRpAggregateCallTree, prof_aggregate_call_tree_mark, prof_aggregate_call_tree_ruby_gc_free, call_tree);
+        call_tree->object = TypedData_Wrap_Struct(cRpAggregateCallTree, &aggregate_call_tree_type, call_tree);
     }
     return call_tree->object;
 }
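
Besides giving the object a debug-friendly name, the shared descriptor enables type-checked unwrapping. A sketch of the canonical accessor for the type defined above; note that ruby-prof's own accessors (later hunks) deliberately read RTYPEDDATA_DATA directly instead because, per the comments in this diff, Data_Get_Struct-style retrieval triggers the profiler's own event hook and recurses endlessly:

```c
/* Sketch only, not ruby-prof's code: TypedData_Get_Struct raises a
 * TypeError unless self was wrapped with &aggregate_call_tree_type,
 * where the old Data_Get_Struct handed back whatever pointer the
 * object happened to carry. */
static prof_call_tree_t* aggregate_call_tree_get(VALUE self)
{
    prof_call_tree_t* call_tree;
    TypedData_Get_Struct(self, prof_call_tree_t, &aggregate_call_tree_type, call_tree);
    return call_tree;
}
```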
ext/ruby_prof/rp_allocation.c CHANGED
@@ -43,6 +43,18 @@ prof_allocation_t* prof_allocation_create(void)
     return result;
 }
 
+prof_allocation_t* prof_get_allocation(VALUE self)
+{
+    /* Can't use Data_Get_Struct because that triggers the event hook
+       ending up in endless recursion. */
+    prof_allocation_t* result = RTYPEDDATA_DATA(self);
+
+    if (!result)
+        rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
+
+    return result;
+}
+
 
 prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t
 {
@@ -75,8 +87,11 @@ prof_allocation_t* prof_allocate_increment(prof_method_t* method, rb_trace_arg_t
 
 static void prof_allocation_ruby_gc_free(void* data)
 {
-    prof_allocation_t* allocation = (prof_allocation_t*)data;
-    allocation->object = Qnil;
+    if (data)
+    {
+        prof_allocation_t* allocation = (prof_allocation_t*)data;
+        allocation->object = Qnil;
+    }
 }
 
 void prof_allocation_free(prof_allocation_t* allocation)
@@ -85,9 +100,7 @@ void prof_allocation_free(prof_allocation_t* allocation)
        yes clean it up so to avoid a segmentation fault. */
     if (allocation->object != Qnil)
     {
-        RDATA(allocation->object)->dmark = NULL;
-        RDATA(allocation->object)->dfree = NULL;
-        RDATA(allocation->object)->data = NULL;
+        RTYPEDDATA(allocation->object)->data = NULL;
         allocation->object = Qnil;
     }
 
@@ -101,6 +114,8 @@ size_t prof_allocation_size(const void* data)
 
 void prof_allocation_mark(void* data)
 {
+    if (!data) return;
+
     prof_allocation_t* allocation = (prof_allocation_t*)data;
     if (allocation->object != Qnil)
         rb_gc_mark(allocation->object);
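
The free and mark changes above form a handshake that runs through the rest of the diff: when the C side frees a struct while its Ruby wrapper is still alive, it nulls the wrapper's data pointer; the GC callbacks therefore tolerate NULL, and accessors convert a stale wrapper into a Ruby exception rather than a use-after-free. A condensed sketch with a hypothetical wrapper_t (names are not ruby-prof's):

```c
#include <ruby.h>

typedef struct { VALUE object; } wrapper_t;   /* hypothetical */

void wrapper_free(wrapper_t* wrapper)
{
    if (wrapper->object != Qnil)
    {
        /* With RTypedData only the data pointer needs clearing; the
         * mark/free/size callbacks live in the shared descriptor and
         * all guard against NULL. Under RData, three per-object slots
         * (dmark, dfree, data) had to be cleared instead. */
        RTYPEDDATA(wrapper->object)->data = NULL;
        wrapper->object = Qnil;
    }
    xfree(wrapper);
}

wrapper_t* wrapper_get(VALUE self)
{
    wrapper_t* result = RTYPEDDATA_DATA(self);
    if (!result)
        rb_raise(rb_eRuntimeError, "This object has already been freed.");
    return result;
}
```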
@@ -115,11 +130,24 @@ void prof_allocation_mark(void* data)
         rb_gc_mark(allocation->source_file);
 }
 
+static const rb_data_type_t allocation_type =
+{
+    .wrap_struct_name = "Allocation",
+    .function =
+    {
+        .dmark = prof_allocation_mark,
+        .dfree = prof_allocation_ruby_gc_free,
+        .dsize = prof_allocation_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
 VALUE prof_allocation_wrap(prof_allocation_t* allocation)
 {
     if (allocation->object == Qnil)
     {
-        allocation->object = Data_Wrap_Struct(cRpAllocation, prof_allocation_mark, prof_allocation_ruby_gc_free, allocation);
+        allocation->object = TypedData_Wrap_Struct(cRpAllocation, &allocation_type, allocation);
     }
     return allocation->object;
 }
@@ -135,7 +163,7 @@ prof_allocation_t* prof_allocation_get(VALUE self)
 {
     /* Can't use Data_Get_Struct because that triggers the event hook
        ending up in endless recursion. */
-    prof_allocation_t* result = DATA_PTR(self);
+    prof_allocation_t* result = RTYPEDDATA_DATA(self);
     if (!result)
         rb_raise(rb_eRuntimeError, "This RubyProf::Allocation instance has already been freed, likely because its profile has been freed.");
 
@@ -210,7 +238,7 @@ static VALUE prof_allocation_memory(VALUE self)
 /* :nodoc: */
 static VALUE prof_allocation_dump(VALUE self)
 {
-    prof_allocation_t* allocation = DATA_PTR(self);
+    prof_allocation_t* allocation = prof_get_allocation(self);
 
     VALUE result = rb_hash_new();
 
@@ -228,7 +256,7 @@ static VALUE prof_allocation_dump(VALUE self)
 /* :nodoc: */
 static VALUE prof_allocation_load(VALUE self, VALUE data)
 {
-    prof_allocation_t* allocation = DATA_PTR(self);
+    prof_allocation_t* allocation = prof_get_allocation(self);
     allocation->object = self;
 
     allocation->key = FIX2LONG(rb_hash_aref(data, ID2SYM(rb_intern("key"))));
@@ -244,7 +272,7 @@ static VALUE prof_allocation_load(VALUE self, VALUE data)
 
 void rp_init_allocation(void)
 {
-    cRpAllocation = rb_define_class_under(mProf, "Allocation", rb_cData);
+    cRpAllocation = rb_define_class_under(mProf, "Allocation", rb_cObject);
     rb_undef_method(CLASS_OF(cRpAllocation), "new");
     rb_define_alloc_func(cRpAllocation, prof_allocation_allocate);
 
ext/ruby_prof/rp_allocation.h CHANGED
@@ -7,7 +7,7 @@
 #include "ruby_prof.h"
 #include "rp_method.h"
 
-typedef struct
+typedef struct prof_allocation_t
 {
     st_data_t key;               /* Key in hash table */
     unsigned int klass_flags;    /* Information about the type of class */
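
A small side effect of this header change: naming the struct tag lets other headers forward-declare the type without including the full definition, e.g. (hypothetical consumer):

```c
/* Sketch: a header that only passes pointers around can now write */
struct prof_allocation_t;                                  /* forward declaration */
void some_consumer(struct prof_allocation_t* allocation);  /* hypothetical */
```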
ext/ruby_prof/rp_call_tree.c CHANGED
@@ -13,14 +13,12 @@ prof_call_tree_t* prof_call_tree_create(prof_method_t* method, prof_call_tree_t*
     prof_call_tree_t* result = ALLOC(prof_call_tree_t);
     result->method = method;
     result->parent = parent;
-    result->children = rb_st_init_numtable();
     result->object = Qnil;
-    result->measurement = prof_measurement_create();
-
     result->visits = 0;
-
     result->source_line = source_line;
     result->source_file = source_file;
+    result->children = rb_st_init_numtable();
+    result->measurement = prof_measurement_create();
 
     return result;
 }
@@ -73,6 +71,9 @@ static int prof_call_tree_mark_children(st_data_t key, st_data_t value, st_data_
 
 void prof_call_tree_mark(void* data)
 {
+    if (!data)
+        return;
+
     prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
 
     if (call_tree->object != Qnil)
@@ -81,6 +82,7 @@ void prof_call_tree_mark(void* data)
     if (call_tree->source_file != Qnil)
         rb_gc_mark(call_tree->source_file);
 
+    prof_method_mark(call_tree->method);
     prof_measurement_mark(call_tree->measurement);
 
     // Recurse down through the whole call tree but only from the top node
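
This one-line prof_method_mark addition is the fix behind the changelog's "CallInfos now keep their owning Profile instances alive" entry: a call tree's dmark now cascades into the method it references, so any reachable CallTree wrapper pins the objects it depends on. The general pattern, sketched with hypothetical types:

```c
#include <ruby.h>

typedef struct owner_t owner_t;      /* hypothetical owning struct */
void owner_mark(owner_t* owner);     /* assumed to mark owner->object etc. */

typedef struct { VALUE object; owner_t* owner; } child_t;

static void child_mark(void* data)
{
    if (!data) return;
    child_t* child = (child_t*)data;
    if (child->object != Qnil)
        rb_gc_mark(child->object);
    owner_mark(child->owner);        /* cascade: owner outlives its children */
}
```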
@@ -91,8 +93,11 @@ void prof_call_tree_mark(void* data)
 
 static void prof_call_tree_ruby_gc_free(void* data)
 {
-    prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
-    call_tree->object = Qnil;
+    if (data)
+    {
+        prof_call_tree_t* call_tree = (prof_call_tree_t*)data;
+        call_tree->object = Qnil;
+    }
 }
 
 static int prof_call_tree_free_children(st_data_t key, st_data_t value, st_data_t data)
@@ -108,9 +113,7 @@ void prof_call_tree_free(prof_call_tree_t* call_tree_data)
        yes clean it up so to avoid a segmentation fault. */
     if (call_tree_data->object != Qnil)
     {
-        RDATA(call_tree_data->object)->dmark = NULL;
-        RDATA(call_tree_data->object)->dfree = NULL;
-        RDATA(call_tree_data->object)->data = NULL;
+        RTYPEDDATA(call_tree_data->object)->data = NULL;
         call_tree_data->object = Qnil;
     }
 
@@ -130,11 +133,24 @@ size_t prof_call_tree_size(const void* data)
     return sizeof(prof_call_tree_t);
 }
 
+static const rb_data_type_t call_tree_type =
+{
+    .wrap_struct_name = "CallTree",
+    .function =
+    {
+        .dmark = prof_call_tree_mark,
+        .dfree = prof_call_tree_ruby_gc_free,
+        .dsize = prof_call_tree_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
 VALUE prof_call_tree_wrap(prof_call_tree_t* call_tree)
 {
     if (call_tree->object == Qnil)
     {
-        call_tree->object = Data_Wrap_Struct(cRpCallTree, prof_call_tree_mark, prof_call_tree_ruby_gc_free, call_tree);
+        call_tree->object = TypedData_Wrap_Struct(cRpCallTree, &call_tree_type, call_tree);
     }
     return call_tree->object;
 }
@@ -150,7 +166,7 @@ prof_call_tree_t* prof_get_call_tree(VALUE self)
 {
     /* Can't use Data_Get_Struct because that triggers the event hook
        ending up in endless recursion. */
-    prof_call_tree_t* result = DATA_PTR(self);
+    prof_call_tree_t* result = RTYPEDDATA_DATA(self);
 
     if (!result)
         rb_raise(rb_eRuntimeError, "This RubyProf::CallTree instance has already been freed, likely because its profile has been freed.");
@@ -335,7 +351,7 @@ static VALUE prof_call_tree_load(VALUE self, VALUE data)
 void rp_init_call_tree()
 {
     /* CallTree */
-    cRpCallTree = rb_define_class_under(mProf, "CallTree", rb_cData);
+    cRpCallTree = rb_define_class_under(mProf, "CallTree", rb_cObject);
     rb_undef_method(CLASS_OF(cRpCallTree), "new");
     rb_define_alloc_func(cRpCallTree, prof_call_tree_allocate);
 
ext/ruby_prof/rp_call_trees.c CHANGED
@@ -14,7 +14,7 @@ prof_call_trees_t* prof_get_call_trees(VALUE self)
 {
     /* Can't use Data_Get_Struct because that triggers the event hook
        ending up in endless recursion. */
-    prof_call_trees_t* result = DATA_PTR(self);
+    prof_call_trees_t* result = RTYPEDDATA_DATA(self);
 
     if (!result)
         rb_raise(rb_eRuntimeError, "This RubyProf::CallTrees instance has already been freed, likely because its profile has been freed.");
@@ -32,8 +32,11 @@ prof_call_trees_t* prof_call_trees_create()
     return result;
 }
 
-void prof_call_trees_mark(prof_call_trees_t* call_trees)
+void prof_call_trees_mark(void* data)
 {
+    if (!data) return;
+
+    prof_call_trees_t* call_trees = (prof_call_trees_t*)data;
     prof_call_tree_t** call_tree;
     for (call_tree = call_trees->start; call_tree < call_trees->ptr; call_tree++)
     {
@@ -47,9 +50,7 @@ void prof_call_trees_free(prof_call_trees_t* call_trees)
        yes clean it up so to avoid a segmentation fault. */
     if (call_trees->object != Qnil)
     {
-        RDATA(call_trees->object)->dmark = NULL;
-        RDATA(call_trees->object)->dfree = NULL;
-        RDATA(call_trees->object)->data = NULL;
+        RTYPEDDATA(call_trees->object)->data = NULL;
         call_trees->object = Qnil;
     }
 
@@ -59,9 +60,12 @@ void prof_call_trees_free(prof_call_trees_t* call_trees)
 
 void prof_call_trees_ruby_gc_free(void* data)
 {
-    // This object gets freed by its owning method
-    prof_call_trees_t* call_trees = (prof_call_trees_t*)data;
-    call_trees->object = Qnil;
+    if (data)
+    {
+        // This object gets freed by its owning method
+        prof_call_trees_t* call_trees = (prof_call_trees_t*)data;
+        call_trees->object = Qnil;
+    }
 }
 
 static int prof_call_trees_collect_aggregates(st_data_t key, st_data_t value, st_data_t data)
@@ -94,11 +98,29 @@ static int prof_call_trees_collect_callees(st_data_t key, st_data_t value, st_da
     return ST_CONTINUE;
 }
 
+size_t prof_call_trees_size(const void* data)
+{
+    return sizeof(prof_call_trees_t);
+}
+
+static const rb_data_type_t call_trees_type =
+{
+    .wrap_struct_name = "CallTrees",
+    .function =
+    {
+        .dmark = prof_call_trees_mark,
+        .dfree = prof_call_trees_ruby_gc_free,
+        .dsize = prof_call_trees_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
 VALUE prof_call_trees_wrap(prof_call_trees_t* call_trees)
 {
     if (call_trees->object == Qnil)
     {
-        call_trees->object = Data_Wrap_Struct(cRpCallTrees, prof_call_trees_mark, prof_call_trees_ruby_gc_free, call_trees);
+        call_trees->object = TypedData_Wrap_Struct(cRpCallTrees, &call_trees_type, call_trees);
     }
     return call_trees->object;
 }
@@ -235,7 +257,7 @@ VALUE prof_call_trees_dump(VALUE self)
 /* :nodoc: */
 VALUE prof_call_trees_load(VALUE self, VALUE data)
 {
-    prof_call_trees_t* call_trees_data = DATA_PTR(self);
+    prof_call_trees_t* call_trees_data = prof_get_call_trees(self);
     call_trees_data->object = self;
 
     VALUE call_trees = rb_hash_aref(data, ID2SYM(rb_intern("call_trees")));
@@ -251,7 +273,7 @@ VALUE prof_call_trees_load(VALUE self, VALUE data)
 
 void rp_init_call_trees()
 {
-    cRpCallTrees = rb_define_class_under(mProf, "CallTrees", rb_cData);
+    cRpCallTrees = rb_define_class_under(mProf, "CallTrees", rb_cObject);
     rb_undef_method(CLASS_OF(cRpCallTrees), "new");
     rb_define_alloc_func(cRpCallTrees, prof_call_trees_allocate);
 
ext/ruby_prof/rp_call_trees.h CHANGED
@@ -20,7 +20,6 @@ typedef struct prof_call_trees_t
 
 void rp_init_call_trees();
 prof_call_trees_t* prof_call_trees_create();
-void prof_call_trees_mark(prof_call_trees_t* call_trees);
 void prof_call_trees_free(prof_call_trees_t* call_trees);
 prof_call_trees_t* prof_get_call_trees(VALUE self);
 void prof_add_call_tree(prof_call_trees_t* call_trees, prof_call_tree_t* call_tree);
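
The signature change above (prof_call_trees_mark now takes void* and drops out of the public header) is not cosmetic: rb_data_type_t's dmark slot has type void (*)(void*), and installing a function of any other pointer type would require a function-pointer cast, which C leaves undefined. All the mark and free callbacks in this diff follow the same shape:

```c
#include <ruby.h>

typedef struct { VALUE object; } thing_t;      /* hypothetical */

/* Matches dmark's void (*)(void*) exactly; the cast happens inside. */
static void thing_mark(void* data)
{
    if (!data) return;                /* data may already have been cleared */
    thing_t* thing = (thing_t*)data;
    if (thing->object != Qnil)
        rb_gc_mark(thing->object);
}
```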
ext/ruby_prof/rp_measurement.c CHANGED
@@ -53,6 +53,8 @@ prof_measurement_t* prof_measurement_create(void)
 
 void prof_measurement_mark(void* data)
 {
+    if (!data) return;
+
     prof_measurement_t* measurement_data = (prof_measurement_t*)data;
 
     if (measurement_data->object != Qnil)
@@ -61,9 +63,12 @@ void prof_measurement_mark(void* data)
 
 static void prof_measurement_ruby_gc_free(void* data)
 {
-    // Measurements are freed by their owning object (call info or method)
-    prof_measurement_t* measurement = (prof_measurement_t*)data;
-    measurement->object = Qnil;
+    if (data)
+    {
+        // Measurements are freed by their owning object (call info or method)
+        prof_measurement_t* measurement = (prof_measurement_t*)data;
+        measurement->object = Qnil;
+    }
 }
 
 void prof_measurement_free(prof_measurement_t* measurement)
@@ -72,9 +77,7 @@ void prof_measurement_free(prof_measurement_t* measurement)
        yes clean it up so to avoid a segmentation fault. */
     if (measurement->object != Qnil)
     {
-        RDATA(measurement->object)->dmark = NULL;
-        RDATA(measurement->object)->dfree = NULL;
-        RDATA(measurement->object)->data = NULL;
+        RTYPEDDATA(measurement->object)->data = NULL;
         measurement->object = Qnil;
     }
 
@@ -86,11 +89,24 @@ size_t prof_measurement_size(const void* data)
     return sizeof(prof_measurement_t);
 }
 
+static const rb_data_type_t measurement_type =
+{
+    .wrap_struct_name = "Measurement",
+    .function =
+    {
+        .dmark = prof_measurement_mark,
+        .dfree = prof_measurement_ruby_gc_free,
+        .dsize = prof_measurement_size,
+    },
+    .data = NULL,
+    .flags = RUBY_TYPED_FREE_IMMEDIATELY
+};
+
 VALUE prof_measurement_wrap(prof_measurement_t* measurement)
 {
     if (measurement->object == Qnil)
     {
-        measurement->object = Data_Wrap_Struct(cRpMeasurement, NULL, prof_measurement_ruby_gc_free, measurement);
+        measurement->object = TypedData_Wrap_Struct(cRpMeasurement, &measurement_type, measurement);
     }
     return measurement->object;
 }
@@ -106,7 +122,7 @@ prof_measurement_t* prof_get_measurement(VALUE self)
 {
     /* Can't use Data_Get_Struct because that triggers the event hook
        ending up in endless recursion. */
-    prof_measurement_t* result = DATA_PTR(self);
+    prof_measurement_t* result = RTYPEDDATA_DATA(self);
 
     if (!result)
         rb_raise(rb_eRuntimeError, "This RubyProf::Measurement instance has already been freed, likely because its profile has been freed.");
@@ -206,7 +222,7 @@ void rp_init_measure()
     rp_init_measure_allocations();
     rp_init_measure_memory();
 
-    cRpMeasurement = rb_define_class_under(mProf, "Measurement", rb_cData);
+    cRpMeasurement = rb_define_class_under(mProf, "Measurement", rb_cObject);
     rb_undef_method(CLASS_OF(cRpMeasurement), "new");
     rb_define_alloc_func(cRpMeasurement, prof_measurement_allocate);
 
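
The last recurring change: every wrapped class is now defined under rb_cObject rather than rb_cData, which was deprecated alongside the RData API and removed in Ruby 3.0. With TypedData, a plain Object subclass plus an allocation function is all that is needed; a sketch reusing the hypothetical point_type from the first example:

```c
/* Sketch: registering a TypedData-backed class on rb_cObject. */
static VALUE point_allocate(VALUE klass)
{
    point_t* point = ALLOC(point_t);
    point->x = point->y = 0.0;
    return TypedData_Wrap_Struct(klass, &point_type, point);
}

void init_point(VALUE module)
{
    VALUE cPoint = rb_define_class_under(module, "Point", rb_cObject);
    rb_define_alloc_func(cPoint, point_allocate);
}
```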
228