thread_safety 0.1.2 → 0.1.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,442 @@
+ /*
+ * Vendored from Ruby v3_4_8: debug_counter.h
+ *
+ * This file is vendored because Ruby 3.4's modular GC implementation requires
+ * internal Ruby headers that are not installed with Ruby. Ruby 4.0+ added
+ * BUILDING_MODULAR_GC guards that provide inline implementations, but these
+ * were not backported to Ruby 3.4.x.
+ *
+ * Relevant Ruby commits that added the guards in 4.0:
+ * - 9130023cf5 (May 2025): Initial BUILDING_MODULAR_GC guards
+ * - 04f538c144 (May 2025): Additional guards for modular GC builds
+ *
+ * This vendoring can be removed when Ruby 3.4 support is dropped and the
+ * minimum Ruby version is 4.0+.
+ *
+ * License: Ruby License (same as Ruby itself)
+ * Original Copyright: (C) 2017 Koichi Sasada
+ */
+
+ /**********************************************************************
+
+ debug_counter.h -
+
+ created at: Tue Feb 21 16:51:18 2017
+
+ Copyright (C) 2017 Koichi Sasada
+
+ **********************************************************************/
+
+ #ifndef USE_DEBUG_COUNTER
+ #define USE_DEBUG_COUNTER 0
+ #endif
+
+ #ifdef RB_DEBUG_COUNTER
+
+ // method cache (IMC: inline method cache)
+ RB_DEBUG_COUNTER(mc_inline_hit) // IMC hit
+ RB_DEBUG_COUNTER(mc_inline_miss_klass) // IMC miss by different class
+ RB_DEBUG_COUNTER(mc_inline_miss_invalidated) // IMC miss by invalidated ME
+ RB_DEBUG_COUNTER(mc_inline_miss_empty) // IMC miss because prev is empty slot
+ RB_DEBUG_COUNTER(mc_inline_miss_same_cc) // IMC miss, but same CC
+ RB_DEBUG_COUNTER(mc_inline_miss_same_cme) // IMC miss, but same CME
+ RB_DEBUG_COUNTER(mc_inline_miss_same_def) // IMC miss, but same definition
+ RB_DEBUG_COUNTER(mc_inline_miss_diff) // IMC miss, different methods
+
+ RB_DEBUG_COUNTER(cvar_write_inline_hit) // cvar cache hit on write
+ RB_DEBUG_COUNTER(cvar_read_inline_hit) // cvar cache hit on read
+ RB_DEBUG_COUNTER(cvar_inline_miss) // miss inline cache
+ RB_DEBUG_COUNTER(cvar_class_invalidate) // invalidate cvar cache when defining a cvar that's defined on a subclass
+ RB_DEBUG_COUNTER(cvar_include_invalidate) // invalidate cvar cache on module include or prepend
+
+ RB_DEBUG_COUNTER(mc_cme_complement) // number of complement CME acquisitions
+ RB_DEBUG_COUNTER(mc_cme_complement_hit) // number of cache hits for complemented CME
+
+ RB_DEBUG_COUNTER(mc_search) // count for method lookup in class tree
+ RB_DEBUG_COUNTER(mc_search_notfound) // method lookup, but not found
+ RB_DEBUG_COUNTER(mc_search_super) // total traversed classes
+
+ // callinfo
+ RB_DEBUG_COUNTER(ci_packed) // number of packed CI
+ RB_DEBUG_COUNTER(ci_kw) // non-packed CI w/ keywords
+ RB_DEBUG_COUNTER(ci_nokw) // non-packed CI w/o keywords
+ RB_DEBUG_COUNTER(ci_runtime) // creating temporary CI
+
+ // callcache
+ RB_DEBUG_COUNTER(cc_new) // number of CC
+ RB_DEBUG_COUNTER(cc_temp) // dummy CC (stack-allocated)
+ RB_DEBUG_COUNTER(cc_found_in_ccs) // count for CC lookup success in CCS
+ RB_DEBUG_COUNTER(cc_not_found_in_ccs) // count for CC lookup failure in CCS
+
+ RB_DEBUG_COUNTER(cc_ent_invalidate) // count for invalidating cc (cc->klass = 0)
+ RB_DEBUG_COUNTER(cc_cme_invalidate) // count for invalidating CME
+
+ RB_DEBUG_COUNTER(cc_invalidate_leaf) // count for invalidating klass if klass has no subclasses
+ RB_DEBUG_COUNTER(cc_invalidate_leaf_ccs) // corresponding CCS
+ RB_DEBUG_COUNTER(cc_invalidate_leaf_callable) // complemented cache (no subclasses)
+ RB_DEBUG_COUNTER(cc_invalidate_tree) // count for invalidating klass if klass has subclasses
+ RB_DEBUG_COUNTER(cc_invalidate_tree_cme) // cme if cme is found in this class or superclasses
+ RB_DEBUG_COUNTER(cc_invalidate_tree_callable) // complemented cache (subclasses)
+ RB_DEBUG_COUNTER(cc_invalidate_negative) // count for invalidating negative cache
+
+ RB_DEBUG_COUNTER(ccs_free) // count for free'ing ccs
+ RB_DEBUG_COUNTER(ccs_maxlen) // maximum length of ccs
+ RB_DEBUG_COUNTER(ccs_found) // count for finding a corresponding ccs on method lookup
+ RB_DEBUG_COUNTER(ccs_not_found) // count for not finding a corresponding ccs on method lookup
+
+ // vm_eval.c
+ RB_DEBUG_COUNTER(call0_public)
+ RB_DEBUG_COUNTER(call0_other)
+ RB_DEBUG_COUNTER(gccct_hit)
+ RB_DEBUG_COUNTER(gccct_miss)
+ RB_DEBUG_COUNTER(gccct_null)
+
+ // iseq
+ RB_DEBUG_COUNTER(iseq_num) // number of total created iseq
+ RB_DEBUG_COUNTER(iseq_cd_num) // number of total created cd (call_data)
+
+ /*
+ * call cache fastpath usage
+ */
+ RB_DEBUG_COUNTER(ccf_general)
+ RB_DEBUG_COUNTER(ccf_iseq_setup)
+ RB_DEBUG_COUNTER(ccf_iseq_setup_0start)
+ RB_DEBUG_COUNTER(ccf_iseq_setup_tailcall_0start)
+ RB_DEBUG_COUNTER(ccf_iseq_fix) /* several functions created with tool/mk_call_iseq_optimized.rb */
+ RB_DEBUG_COUNTER(ccf_iseq_opt) /* has_opt == TRUE (has optional parameters), but other flags are FALSE */
+ RB_DEBUG_COUNTER(ccf_iseq_kw1) /* vm_call_iseq_setup_kwparm_kwarg() */
+ RB_DEBUG_COUNTER(ccf_iseq_kw2) /* vm_call_iseq_setup_kwparm_nokwarg() */
+ RB_DEBUG_COUNTER(ccf_cfunc)
+ RB_DEBUG_COUNTER(ccf_cfunc_with_frame)
+ RB_DEBUG_COUNTER(ccf_ivar) /* attr_reader */
+ RB_DEBUG_COUNTER(ccf_attrset) /* attr_writer */
+ RB_DEBUG_COUNTER(ccf_method_missing)
+ RB_DEBUG_COUNTER(ccf_zsuper)
+ RB_DEBUG_COUNTER(ccf_bmethod)
+ RB_DEBUG_COUNTER(ccf_opt_send)
+ RB_DEBUG_COUNTER(ccf_opt_call)
+ RB_DEBUG_COUNTER(ccf_opt_block_call)
+ RB_DEBUG_COUNTER(ccf_opt_struct_aref)
+ RB_DEBUG_COUNTER(ccf_opt_struct_aset)
+ RB_DEBUG_COUNTER(ccf_super_method)
+ RB_DEBUG_COUNTER(ccf_cfunc_other)
+ RB_DEBUG_COUNTER(ccf_cfunc_only_splat)
+ RB_DEBUG_COUNTER(ccf_cfunc_only_splat_kw)
+ RB_DEBUG_COUNTER(ccf_iseq_bmethod)
+ RB_DEBUG_COUNTER(ccf_noniseq_bmethod)
+ RB_DEBUG_COUNTER(ccf_opt_send_complex)
+ RB_DEBUG_COUNTER(ccf_opt_send_simple)
+
+ /*
+ * control frame push counts.
+ *
+ * * frame_push: frame push counts.
+ * * frame_push_*: frame push counts for each frame type.
+ * * frame_R2R: Ruby frame to Ruby frame
+ * * frame_R2C: Ruby frame to C frame
+ * * frame_C2C: C frame to C frame
+ * * frame_C2R: C frame to Ruby frame
+ */
+ RB_DEBUG_COUNTER(frame_push)
+ RB_DEBUG_COUNTER(frame_push_method)
+ RB_DEBUG_COUNTER(frame_push_block)
+ RB_DEBUG_COUNTER(frame_push_class)
+ RB_DEBUG_COUNTER(frame_push_top)
+ RB_DEBUG_COUNTER(frame_push_cfunc)
+ RB_DEBUG_COUNTER(frame_push_ifunc)
+ RB_DEBUG_COUNTER(frame_push_eval)
+ RB_DEBUG_COUNTER(frame_push_rescue)
+ RB_DEBUG_COUNTER(frame_push_dummy)
+
+ RB_DEBUG_COUNTER(frame_R2R)
+ RB_DEBUG_COUNTER(frame_R2C)
+ RB_DEBUG_COUNTER(frame_C2C)
+ RB_DEBUG_COUNTER(frame_C2R)
+
+ /* instance variable counts */
+ RB_DEBUG_COUNTER(ivar_get_obj_hit) // Only T_OBJECT hits
+ RB_DEBUG_COUNTER(ivar_get_obj_miss) // Only T_OBJECT misses
+ RB_DEBUG_COUNTER(ivar_get_ic_hit) // All hits
+ RB_DEBUG_COUNTER(ivar_get_ic_miss) // All misses
+ RB_DEBUG_COUNTER(ivar_set_ic_hit) // All hits
+ RB_DEBUG_COUNTER(ivar_set_obj_hit) // Only T_OBJECT hits
+ RB_DEBUG_COUNTER(ivar_set_obj_miss) // Only T_OBJECT misses
+ RB_DEBUG_COUNTER(ivar_set_ic_miss) // All misses
+ RB_DEBUG_COUNTER(ivar_set_ic_miss_noobject) // Miss because non T_OBJECT
+ RB_DEBUG_COUNTER(ivar_get_base) // Calls to `rb_ivar_get` (very slow path)
+ RB_DEBUG_COUNTER(ivar_set_base) // Calls to `ivar_set` (very slow path)
+ RB_DEBUG_COUNTER(ivar_get_ic_miss_set) // Misses on IV reads where the cache was wrong
+ RB_DEBUG_COUNTER(ivar_get_cc_miss_set) // Misses on attr_reader where the cache was wrong
+ RB_DEBUG_COUNTER(ivar_get_ic_miss_unset) // Misses on IV read where the cache wasn't set
+ RB_DEBUG_COUNTER(ivar_get_cc_miss_unset) // Misses on attr_reader where the cache wasn't set
+
+ /* local variable counts
+ *
+ * * lvar_get: total lvar get counts (VM insn)
+ * * lvar_get_dynamic: lvar get counts if accessing upper env (VM insn)
+ * * lvar_set*: same as "get"
+ * * lvar_set_slowpath: counts using vm_env_write_slowpath()
+ */
+ RB_DEBUG_COUNTER(lvar_get)
+ RB_DEBUG_COUNTER(lvar_get_dynamic)
+ RB_DEBUG_COUNTER(lvar_set)
+ RB_DEBUG_COUNTER(lvar_set_dynamic)
+ RB_DEBUG_COUNTER(lvar_set_slowpath)
+
+ /* GC counts:
+ *
+ * * count: simple count
+ * * _minor: minor gc
+ * * _major: major gc
+ * * other suffixes correspond to last_gc_info or
+ * gc_profile_record_flag in gc.c.
+ */
+ RB_DEBUG_COUNTER(gc_count)
+ RB_DEBUG_COUNTER(gc_minor_newobj)
+ RB_DEBUG_COUNTER(gc_minor_malloc)
+ RB_DEBUG_COUNTER(gc_minor_method)
+ RB_DEBUG_COUNTER(gc_minor_capi)
+ RB_DEBUG_COUNTER(gc_minor_stress)
+ RB_DEBUG_COUNTER(gc_major_nofree)
+ RB_DEBUG_COUNTER(gc_major_oldgen)
+ RB_DEBUG_COUNTER(gc_major_shady)
+ RB_DEBUG_COUNTER(gc_major_force)
+ RB_DEBUG_COUNTER(gc_major_oldmalloc)
+
+ RB_DEBUG_COUNTER(gc_enter_start)
+ RB_DEBUG_COUNTER(gc_enter_continue)
+ RB_DEBUG_COUNTER(gc_enter_rest)
+ RB_DEBUG_COUNTER(gc_enter_finalizer)
+
+ RB_DEBUG_COUNTER(gc_isptr_trial)
+ RB_DEBUG_COUNTER(gc_isptr_range)
+ RB_DEBUG_COUNTER(gc_isptr_align)
+ RB_DEBUG_COUNTER(gc_isptr_maybe)
+
+ /* object allocation counts:
+ *
+ * * obj_newobj: newobj counts
+ * * obj_newobj_slowpath: newobj with slowpath counts
+ * * obj_newobj_wb_unprotected: newobj for wb_unprotected.
+ * * obj_free: obj_free() counts
+ * * obj_promote: promoted counts (oldgen)
+ * * obj_wb_unprotect: wb unprotect counts
+ *
+ * * obj_[type]_[attr]: *free'ed counts* for each type.
+ *   Note that these are not allocation counts.
+ * * [type]
+ *   * _obj: T_OBJECT
+ *   * _str: T_STRING
+ *   * _ary: T_ARRAY
+ *   * _xxx: T_XXX (hash, struct, ...)
+ *
+ * * [attr]
+ *   * _ptr: R?? is not embedded.
+ *   * _embed: R?? is embedded.
+ *   * type specific attr.
+ *     * str_shared: str is shared.
+ *     * str_nofree: nofree
+ *     * str_fstr: fstr
+ *     * hash_empty: hash is empty
+ *     * hash_1_4: has 1 to 4 entries
+ *     * hash_5_8: has 5 to 8 entries
+ *     * hash_g8: has n entries (n>8)
+ *     * match_under4: has under 4 oniguruma regions allocated
+ *     * match_ge4: has n regions allocated (4<=n<8)
+ *     * match_ge8: has n regions allocated (8<=n)
+ *     * data_empty: T_DATA but no memory free.
+ *     * data_xfree: free'ed by xfree().
+ *     * data_imm_free: free'ed immediately.
+ *     * data_zombie: free'ed with zombie.
+ *   * imemo_*: T_IMEMO with each type.
+ */
+ RB_DEBUG_COUNTER(obj_newobj)
+ RB_DEBUG_COUNTER(obj_newobj_slowpath)
+ RB_DEBUG_COUNTER(obj_newobj_wb_unprotected)
+ RB_DEBUG_COUNTER(obj_free)
+ RB_DEBUG_COUNTER(obj_promote)
+ RB_DEBUG_COUNTER(obj_wb_unprotect)
+
+ RB_DEBUG_COUNTER(obj_obj_embed)
+ RB_DEBUG_COUNTER(obj_obj_ptr)
+ RB_DEBUG_COUNTER(obj_obj_too_complex)
+
+ RB_DEBUG_COUNTER(obj_str_ptr)
+ RB_DEBUG_COUNTER(obj_str_embed)
+ RB_DEBUG_COUNTER(obj_str_shared)
+ RB_DEBUG_COUNTER(obj_str_nofree)
+ RB_DEBUG_COUNTER(obj_str_fstr)
+
+ RB_DEBUG_COUNTER(obj_ary_embed)
+ RB_DEBUG_COUNTER(obj_ary_ptr)
+ RB_DEBUG_COUNTER(obj_ary_extracapa)
+ /*
+ ary_shared_create: shared ary by Array#dup and so on.
+ ary_shared: finished in shared.
+ ary_shared_root_occupied: shared_root but has only 1 refcnt.
+ The number (ary_shared - ary_shared_root_occupied) is meaningful.
+ */
+ RB_DEBUG_COUNTER(obj_ary_shared_create)
+ RB_DEBUG_COUNTER(obj_ary_shared)
+ RB_DEBUG_COUNTER(obj_ary_shared_root_occupied)
+
+ RB_DEBUG_COUNTER(obj_hash_empty)
+ RB_DEBUG_COUNTER(obj_hash_1)
+ RB_DEBUG_COUNTER(obj_hash_2)
+ RB_DEBUG_COUNTER(obj_hash_3)
+ RB_DEBUG_COUNTER(obj_hash_4)
+ RB_DEBUG_COUNTER(obj_hash_5_8)
+ RB_DEBUG_COUNTER(obj_hash_g8)
+
+ RB_DEBUG_COUNTER(obj_hash_null)
+ RB_DEBUG_COUNTER(obj_hash_ar)
+ RB_DEBUG_COUNTER(obj_hash_st)
+ RB_DEBUG_COUNTER(obj_hash_force_convert)
+
+ RB_DEBUG_COUNTER(obj_struct_embed)
+ RB_DEBUG_COUNTER(obj_struct_ptr)
+
+ RB_DEBUG_COUNTER(obj_data_empty)
+ RB_DEBUG_COUNTER(obj_data_xfree)
+ RB_DEBUG_COUNTER(obj_data_imm_free)
+ RB_DEBUG_COUNTER(obj_data_zombie)
+
+ RB_DEBUG_COUNTER(obj_match_under4)
+ RB_DEBUG_COUNTER(obj_match_ge4)
+ RB_DEBUG_COUNTER(obj_match_ge8)
+ RB_DEBUG_COUNTER(obj_match_ptr)
+
+ RB_DEBUG_COUNTER(obj_iclass_ptr)
+ RB_DEBUG_COUNTER(obj_class_ptr)
+ RB_DEBUG_COUNTER(obj_module_ptr)
+
+ RB_DEBUG_COUNTER(obj_bignum_ptr)
+ RB_DEBUG_COUNTER(obj_bignum_embed)
+ RB_DEBUG_COUNTER(obj_float)
+ RB_DEBUG_COUNTER(obj_complex)
+ RB_DEBUG_COUNTER(obj_rational)
+
+ RB_DEBUG_COUNTER(obj_regexp_ptr)
+ RB_DEBUG_COUNTER(obj_file_ptr)
+ RB_DEBUG_COUNTER(obj_symbol)
+
+ RB_DEBUG_COUNTER(obj_imemo_ment)
+ RB_DEBUG_COUNTER(obj_imemo_iseq)
+ RB_DEBUG_COUNTER(obj_imemo_env)
+ RB_DEBUG_COUNTER(obj_imemo_tmpbuf)
+ RB_DEBUG_COUNTER(obj_imemo_ast)
+ RB_DEBUG_COUNTER(obj_imemo_cref)
+ RB_DEBUG_COUNTER(obj_imemo_svar)
+ RB_DEBUG_COUNTER(obj_imemo_throw_data)
+ RB_DEBUG_COUNTER(obj_imemo_ifunc)
+ RB_DEBUG_COUNTER(obj_imemo_memo)
+ RB_DEBUG_COUNTER(obj_imemo_parser_strterm)
+ RB_DEBUG_COUNTER(obj_imemo_callinfo)
+ RB_DEBUG_COUNTER(obj_imemo_callcache)
+ RB_DEBUG_COUNTER(obj_imemo_constcache)
+
+ /* ar_table */
+ RB_DEBUG_COUNTER(artable_hint_hit)
+ RB_DEBUG_COUNTER(artable_hint_miss)
+ RB_DEBUG_COUNTER(artable_hint_notfound)
+
+ /* heap function counts
+ *
+ * * heap_xmalloc/realloc/xfree: call counts
+ */
+ RB_DEBUG_COUNTER(heap_xmalloc)
+ RB_DEBUG_COUNTER(heap_xrealloc)
+ RB_DEBUG_COUNTER(heap_xfree)
+
+ // VM sync
+ RB_DEBUG_COUNTER(vm_sync_lock)
+ RB_DEBUG_COUNTER(vm_sync_lock_enter)
+ RB_DEBUG_COUNTER(vm_sync_lock_enter_nb)
+ RB_DEBUG_COUNTER(vm_sync_lock_enter_cr)
+ RB_DEBUG_COUNTER(vm_sync_barrier)
+
+ /* load (not implemented yet) */
+ /*
+ RB_DEBUG_COUNTER(load_files)
+ RB_DEBUG_COUNTER(load_path_is_not_realpath)
+ */
+ #endif
+
+ #ifndef RUBY_DEBUG_COUNTER_H
+ #define RUBY_DEBUG_COUNTER_H 1
+
+ #include "ruby/internal/config.h"
+ #include <stddef.h> /* for size_t */
+ #include "ruby/ruby.h" /* for VALUE */
+
+ #if !defined(__GNUC__) && USE_DEBUG_COUNTER
+ #error "USE_DEBUG_COUNTER is not supported by other than __GNUC__"
+ #endif
+
+ enum rb_debug_counter_type {
+ #define RB_DEBUG_COUNTER(name) RB_DEBUG_COUNTER_##name,
+ #include "debug_counter.h"
+     RB_DEBUG_COUNTER_MAX
+ #undef RB_DEBUG_COUNTER
+ };
+
+ #if USE_DEBUG_COUNTER
+ extern size_t rb_debug_counter[];
+ RUBY_EXTERN struct rb_ractor_struct *ruby_single_main_ractor;
+ RUBY_EXTERN void rb_debug_counter_add_atomic(enum rb_debug_counter_type type, int add);
+
+ inline static int
+ rb_debug_counter_add(enum rb_debug_counter_type type, int add, int cond)
+ {
+     if (cond) {
+         if (ruby_single_main_ractor != NULL) {
+             rb_debug_counter[(int)type] += add;
+         }
+         else {
+             rb_debug_counter_add_atomic(type, add);
+         }
+     }
+     return cond;
+ }
+
+ inline static int
+ rb_debug_counter_max(enum rb_debug_counter_type type, unsigned int num)
+ {
+     // TODO: sync
+     if (rb_debug_counter[(int)type] < num) {
+         rb_debug_counter[(int)type] = num;
+         return 1;
+     }
+     else {
+         return 0;
+     }
+ }
+
+ VALUE rb_debug_counter_reset(VALUE klass);
+ VALUE rb_debug_counter_show(VALUE klass);
+
+ #define RB_DEBUG_COUNTER_INC(type) rb_debug_counter_add(RB_DEBUG_COUNTER_##type, 1, 1)
+ #define RB_DEBUG_COUNTER_INC_UNLESS(type, cond) (!rb_debug_counter_add(RB_DEBUG_COUNTER_##type, 1, !(cond)))
+ #define RB_DEBUG_COUNTER_INC_IF(type, cond) rb_debug_counter_add(RB_DEBUG_COUNTER_##type, 1, !!(cond))
+ #define RB_DEBUG_COUNTER_ADD(type, num) rb_debug_counter_add(RB_DEBUG_COUNTER_##type, (num), 1)
+ #define RB_DEBUG_COUNTER_SETMAX(type, num) rb_debug_counter_max(RB_DEBUG_COUNTER_##type, (unsigned int)(num))
+
+ #else
+ #define RB_DEBUG_COUNTER_INC(type) ((void)0)
+ #define RB_DEBUG_COUNTER_INC_UNLESS(type, cond) (!!(cond))
+ #define RB_DEBUG_COUNTER_INC_IF(type, cond) (!!(cond))
+ #define RB_DEBUG_COUNTER_ADD(type, num) ((void)0)
+ #define RB_DEBUG_COUNTER_SETMAX(type, num) 0
+ #endif
+
+ void rb_debug_counter_show_results(const char *msg);
+
+ RUBY_SYMBOL_EXPORT_BEGIN
+
+ size_t ruby_debug_counter_get(const char **names_ptr, size_t *counters_ptr);
+ void ruby_debug_counter_reset(void);
+ void ruby_debug_counter_show_at_exit(int enable);
+
+ RUBY_SYMBOL_EXPORT_END
+
+ #endif /* RUBY_DEBUG_COUNTER_H */
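
The vendored header is an X-macro list: each RB_DEBUG_COUNTER(name) entry expands to whatever the including file defines RB_DEBUG_COUNTER to be, which is how the enum near the end of the file turns the list into RB_DEBUG_COUNTER_* constants. A minimal sketch of the same pattern from a consumer's side, assuming a hypothetical example_counters.c (the file name and the example_* functions below are illustrative only and are not taken from this gem or from Ruby's sources):

    /* example_counters.c -- illustrative sketch only */
    #include "debug_counter.h"

    /* Re-include the header with RB_DEBUG_COUNTER redefined so the same list
     * expands into a string table; the RUBY_DEBUG_COUNTER_H include guard keeps
     * the declarations in the second half from being emitted twice. */
    static const char *const counter_names[] = {
    #define RB_DEBUG_COUNTER(name) #name,
    #include "debug_counter.h"
    #undef RB_DEBUG_COUNTER
    };

    const char *
    example_counter_name(enum rb_debug_counter_type type)
    {
        return type < RB_DEBUG_COUNTER_MAX ? counter_names[type] : "unknown";
    }

    /* Instrumented code bumps counters through the macros declared above. */
    void
    example_lookup(int cache_hit)
    {
        if (RB_DEBUG_COUNTER_INC_IF(mc_inline_hit, cache_hit)) {
            /* inline-cache hit path */
        }
        else {
            RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
            /* fall back to a slower lookup */
        }
    }

Because the two halves of the header are guarded independently (the counter list by RB_DEBUG_COUNTER, the declarations by RUBY_DEBUG_COUNTER_H), it can be included repeatedly with different expansions, and with the default USE_DEBUG_COUNTER of 0 the RB_DEBUG_COUNTER_INC* calls compile away to no-ops.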