memory-profiler 1.0.3 → 1.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,576 @@
+ // Released under the MIT License.
+ // Copyright, 2025, by Samuel Williams.
+
+ #include "capture.h"
+
+ #include "ruby.h"
+ #include "ruby/debug.h"
+ #include "ruby/st.h"
+ #include <stdatomic.h>
+ #include <stdio.h>
+
+ enum {
+ 	DEBUG = 0,
+ };
+
+ static VALUE Memory_Profiler_Capture = Qnil;
+ static VALUE Memory_Profiler_Allocations = Qnil;
+
+ // Event symbols
+ static VALUE sym_newobj;
+ static VALUE sym_freeobj;
+
+ // Per-class allocation tracking record
+ struct Memory_Profiler_Capture_Allocations {
+ 	VALUE callback; // Optional Ruby callable invoked on :newobj and :freeobj events
+ 	size_t new_count; // Total allocations seen since tracking started
+ 	size_t free_count; // Total frees seen since tracking started
+ 	// Live count = new_count - free_count
+
+ 	// For detailed tracking: map object (VALUE) => state (VALUE)
+ 	// State is returned from the callback on :newobj and passed back on :freeobj
+ 	st_table *object_states;
+ };
+
+ // Main capture state
+ struct Memory_Profiler_Capture {
+ 	// class => Memory_Profiler_Capture_Allocations.
+ 	st_table *tracked_classes;
+
+ 	// Is tracking enabled (via start/stop):
+ 	int enabled;
+ };
+
+ // Helper to mark object_states table values
+ static int Memory_Profiler_Capture_mark_state(st_data_t key, st_data_t value, st_data_t arg) {
+ 	// key is VALUE (object) - don't mark it, we're just using it as a key
+ 	// value is VALUE (state) - mark it as movable
+ 	rb_gc_mark_movable((VALUE)value);
+ 	return ST_CONTINUE;
+ }
+
+ // GC mark callback for tracked_classes table
+ static int Memory_Profiler_Capture_mark_class(st_data_t key, st_data_t value, st_data_t arg) {
+ 	VALUE klass = (VALUE)key;
+
+ 	// Mark the class as un-movable; we don't want it to have moved by the time freeobj fires.
+ 	rb_gc_mark(klass);
+
+ 	struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)value;
+ 	if (!NIL_P(record->callback)) {
+ 		rb_gc_mark_movable(record->callback); // Mark callback as movable
+ 	}
+
+ 	// Mark object_states table if it exists
+ 	if (record->object_states) {
+ 		st_foreach(record->object_states, Memory_Profiler_Capture_mark_state, 0);
+ 	}
+
+ 	return ST_CONTINUE;
+ }
+
+ // GC mark function
+ static void Memory_Profiler_Capture_mark(void *ptr) {
+ 	struct Memory_Profiler_Capture *capture = ptr;
+
+ 	if (!capture) {
+ 		return;
+ 	}
+
+ 	if (capture->tracked_classes) {
+ 		st_foreach(capture->tracked_classes, Memory_Profiler_Capture_mark_class, 0);
+ 	}
+ }
+
+ // Iterator to free each class record
+ static int Memory_Profiler_Capture_free_class_record(st_data_t key, st_data_t value, st_data_t arg) {
+ 	struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)value;
+ 	if (record->object_states) {
+ 		st_free_table(record->object_states);
+ 	}
+ 	xfree(record);
+ 	return ST_CONTINUE;
+ }
+
+ // GC free function
+ static void Memory_Profiler_Capture_free(void *ptr) {
+ 	struct Memory_Profiler_Capture *capture = ptr;
+
+ 	// Event hooks should be removed via stop() before this object is freed;
+ 	// Ruby also removes any remaining hooks automatically when the VALUE is GC'd.
+
+ 	if (capture->tracked_classes) {
+ 		st_foreach(capture->tracked_classes, Memory_Profiler_Capture_free_class_record, 0);
+ 		st_free_table(capture->tracked_classes);
+ 	}
+
+ 	xfree(capture);
+ }
+
+ // GC memsize function
+ static size_t Memory_Profiler_Capture_memsize(const void *ptr) {
+ 	const struct Memory_Profiler_Capture *capture = ptr;
+ 	size_t size = sizeof(struct Memory_Profiler_Capture);
+
+ 	if (capture->tracked_classes) {
+ 		size += capture->tracked_classes->num_entries * (sizeof(st_data_t) + sizeof(struct Memory_Profiler_Capture_Allocations));
+ 	}
+
+ 	return size;
+ }
+
+
+ // Callback for st_foreach_with_replace to update class keys and callback values
+ static int Memory_Profiler_Capture_update_refs(st_data_t *key, st_data_t *value, st_data_t argp, int existing) {
+ 	// Update class key if it moved
+ 	VALUE old_klass = (VALUE)*key;
+ 	VALUE new_klass = rb_gc_location(old_klass);
+ 	if (old_klass != new_klass) {
+ 		*key = (st_data_t)new_klass;
+ 	}
+
+ 	// Update callback if it moved
+ 	struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)*value;
+ 	if (!NIL_P(record->callback)) {
+ 		VALUE old_callback = record->callback;
+ 		VALUE new_callback = rb_gc_location(old_callback);
+ 		if (old_callback != new_callback) {
+ 			record->callback = new_callback;
+ 		}
+ 	}
+
+ 	return ST_CONTINUE;
+ }
+
+ // GC compact function - update VALUEs when GC compaction moves objects
+ static void Memory_Profiler_Capture_compact(void *ptr) {
+ 	struct Memory_Profiler_Capture *capture = ptr;
+
+ 	// Update tracked_classes keys and callback values in-place
+ 	if (capture->tracked_classes && capture->tracked_classes->num_entries > 0) {
+ 		if (st_foreach_with_replace(capture->tracked_classes, NULL, Memory_Profiler_Capture_update_refs, 0)) {
+ 			rb_raise(rb_eRuntimeError, "tracked_classes modified during GC compaction");
+ 		}
+ 	}
+ }
+
+ static const rb_data_type_t Memory_Profiler_Capture_type = {
+ 	"Memory::Profiler::Capture",
+ 	{
+ 		.dmark = Memory_Profiler_Capture_mark,
+ 		.dcompact = Memory_Profiler_Capture_compact,
+ 		.dfree = Memory_Profiler_Capture_free,
+ 		.dsize = Memory_Profiler_Capture_memsize,
+ 	},
+ 	0, 0, RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED
+ };
+
+ const char *event_flag_name(rb_event_flag_t event_flag) {
+ 	switch (event_flag) {
+ 		case RUBY_EVENT_CALL: return "call";
+ 		case RUBY_EVENT_C_CALL: return "c-call";
+ 		case RUBY_EVENT_B_CALL: return "b-call";
+ 		case RUBY_EVENT_RETURN: return "return";
+ 		case RUBY_EVENT_C_RETURN: return "c-return";
+ 		case RUBY_EVENT_B_RETURN: return "b-return";
+ 		case RUBY_INTERNAL_EVENT_NEWOBJ: return "newobj";
+ 		case RUBY_INTERNAL_EVENT_FREEOBJ: return "freeobj";
+ 		case RUBY_INTERNAL_EVENT_GC_START: return "gc-start";
+ 		case RUBY_INTERNAL_EVENT_GC_END_MARK: return "gc-end-mark";
+ 		case RUBY_INTERNAL_EVENT_GC_END_SWEEP: return "gc-end-sweep";
+ 		case RUBY_EVENT_LINE: return "line";
+ 		default: return "unknown";
+ 	}
+ }
+
+ // Handler for NEWOBJ event
+ static void Memory_Profiler_Capture_newobj_handler(struct Memory_Profiler_Capture *capture, VALUE klass, VALUE object) {
+ 	st_data_t record_data;
+ 	if (st_lookup(capture->tracked_classes, (st_data_t)klass, &record_data)) {
+ 		struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)record_data;
+ 		record->new_count++;
+ 		if (!NIL_P(record->callback)) {
+ 			// Invoke callback - runs during NEWOBJ with GC disabled
+ 			// CRITICAL CALLBACK REQUIREMENTS:
+ 			// - Must be FAST (runs on EVERY allocation)
+ 			// - Must NOT call GC.start (will deadlock)
+ 			// - Must NOT block/sleep (stalls all allocations system-wide)
+ 			// - Should NOT raise exceptions (will propagate to allocating code)
+ 			// - Avoid allocating objects (causes re-entry)
+
+ 			// Call with (klass, :newobj, nil) - callback returns state to store
+ 			VALUE state = rb_funcall(record->callback, rb_intern("call"), 3, klass, sym_newobj, Qnil);
+
+ 			// Store the state if callback returned something
+ 			if (!NIL_P(state)) {
+ 				if (!record->object_states) {
+ 					record->object_states = st_init_numtable();
+ 				}
+ 				st_insert(record->object_states, (st_data_t)object, (st_data_t)state);
+ 			}
+ 		}
+ 	} else {
+ 		// Create record for this class (first time seeing it)
+ 		struct Memory_Profiler_Capture_Allocations *record = ALLOC(struct Memory_Profiler_Capture_Allocations);
+ 		record->callback = Qnil;
+ 		record->new_count = 1; // This is the first allocation
+ 		record->free_count = 0;
+ 		record->object_states = NULL;
+ 		st_insert(capture->tracked_classes, (st_data_t)klass, (st_data_t)record);
+ 	}
+ }
+
+ // Handler for FREEOBJ event
+ static void Memory_Profiler_Capture_freeobj_handler(struct Memory_Profiler_Capture *capture, VALUE klass, VALUE object) {
+ 	st_data_t record_data;
+ 	if (st_lookup(capture->tracked_classes, (st_data_t)klass, &record_data)) {
+ 		struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)record_data;
+ 		record->free_count++;
+ 		if (!NIL_P(record->callback) && record->object_states) {
+ 			// Look up state stored during NEWOBJ
+ 			st_data_t state_data;
+ 			if (st_delete(record->object_states, (st_data_t *)&object, &state_data)) {
+ 				VALUE state = (VALUE)state_data;
+ 				// Call with (klass, :freeobj, state)
+ 				rb_funcall(record->callback, rb_intern("call"), 3, klass, sym_freeobj, state);
+ 			}
+ 		}
+ 	}
+ }
+
+ // Check if object type is trackable (has valid class pointer and is a normal Ruby object)
+ // Excludes internal types (T_IMEMO, T_NODE, T_ICLASS, etc.) that don't have normal classes
+ int Memory_Profiler_Capture_trackable_p(VALUE object) {
+ 	switch (rb_type(object)) {
+ 		// Normal Ruby objects with valid class pointers
+ 		case RUBY_T_OBJECT: // Plain objects
+ 		case RUBY_T_CLASS: // Class objects
+ 		case RUBY_T_MODULE: // Module objects
+ 		case RUBY_T_STRING: // Strings
+ 		case RUBY_T_ARRAY: // Arrays
+ 		case RUBY_T_HASH: // Hashes
+ 		case RUBY_T_STRUCT: // Structs
+ 		case RUBY_T_BIGNUM: // Big integers
+ 		case RUBY_T_FLOAT: // Floating point numbers
+ 		case RUBY_T_FILE: // File objects
+ 		case RUBY_T_DATA: // C extension data
+ 		case RUBY_T_MATCH: // Regex match data
+ 		case RUBY_T_COMPLEX: // Complex numbers
+ 		case RUBY_T_RATIONAL: // Rational numbers
+ 		case RUBY_T_REGEXP: // Regular expressions
+ 			return true;
+
+ 		// Skip internal types (don't have normal class pointers):
+ 		// - T_IMEMO: Internal memory objects (use class field for other data)
+ 		// - T_NODE: AST nodes (internal)
+ 		// - T_ICLASS: Include wrappers (internal)
+ 		// - T_ZOMBIE: Finalizing objects (internal)
+ 		// - T_MOVED: Compaction placeholders (internal)
+ 		// - T_NONE: Uninitialized slots
+ 		// - T_UNDEF: Undefined value marker
+ 		default:
+ 			return false;
+ 	}
+ }
+
+ // Event hook callback with RAW_ARG
+ // Signature: (VALUE data, rb_trace_arg_t *trace_arg)
+ static void Memory_Profiler_Capture_event_callback(VALUE data, void *ptr) {
+ 	rb_trace_arg_t *trace_arg = (rb_trace_arg_t *)ptr;
+
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(data, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	if (!capture->enabled) return;
+
+ 	VALUE object = rb_tracearg_object(trace_arg);
+
+ 	// We don't want to track internal non-Object allocations:
+ 	if (!Memory_Profiler_Capture_trackable_p(object)) return;
+
+ 	rb_event_flag_t event_flag = rb_tracearg_event_flag(trace_arg);
+ 	VALUE klass = rb_class_of(object);
+ 	if (!klass) return;
+
+ 	if (DEBUG) {
+ 		const char *klass_name = rb_class2name(klass);
+ 		fprintf(stderr, "Memory_Profiler_Capture_event_callback: %s, Object: %p, Class: %p (%s)\n", event_flag_name(event_flag), (void *)object, (void *)klass, klass_name);
+ 	}
+
+ 	if (event_flag == RUBY_INTERNAL_EVENT_NEWOBJ) {
+ 		// object is the newly allocated object
+ 		Memory_Profiler_Capture_newobj_handler(capture, klass, object);
+ 	} else if (event_flag == RUBY_INTERNAL_EVENT_FREEOBJ) {
+ 		// object is the object being freed
+ 		Memory_Profiler_Capture_freeobj_handler(capture, klass, object);
+ 	}
+ }
+
+ // Allocate new capture
+ static VALUE Memory_Profiler_Capture_alloc(VALUE klass) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	VALUE obj = TypedData_Make_Struct(klass, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	if (!capture) {
+ 		rb_raise(rb_eRuntimeError, "Failed to allocate Memory::Profiler::Capture");
+ 	}
+
+ 	capture->tracked_classes = st_init_numtable();
+
+ 	if (!capture->tracked_classes) {
+ 		rb_raise(rb_eRuntimeError, "Failed to initialize hash table");
+ 	}
+
+ 	capture->enabled = 0;
+
+ 	return obj;
+ }
+
+ // Initialize capture
+ static VALUE Memory_Profiler_Capture_initialize(VALUE self) {
+ 	return self;
+ }
+
+ // Start capturing allocations
+ static VALUE Memory_Profiler_Capture_start(VALUE self) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	if (capture->enabled) return Qfalse;
+
+ 	// Add event hook for NEWOBJ and FREEOBJ with RAW_ARG to get trace_arg
+ 	rb_add_event_hook2(
+ 		(rb_event_hook_func_t)Memory_Profiler_Capture_event_callback,
+ 		RUBY_INTERNAL_EVENT_NEWOBJ | RUBY_INTERNAL_EVENT_FREEOBJ,
+ 		self,
+ 		RUBY_EVENT_HOOK_FLAG_SAFE | RUBY_EVENT_HOOK_FLAG_RAW_ARG
+ 	);
+
+ 	capture->enabled = 1;
+
+ 	return Qtrue;
+ }
+
+ // Stop capturing allocations
+ static VALUE Memory_Profiler_Capture_stop(VALUE self) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	if (!capture->enabled) return Qfalse;
+
+ 	// Remove event hook using same data (self) we registered with
+ 	rb_remove_event_hook_with_data((rb_event_hook_func_t)Memory_Profiler_Capture_event_callback, self);
+
+ 	capture->enabled = 0;
+
+ 	return Qtrue;
+ }
+
+ // Add a class to track with optional callback
+ // Usage: track(klass) or track(klass) { |klass, event, state| ... }
+ // The callback can call caller_locations with the desired depth
+ static VALUE Memory_Profiler_Capture_track(int argc, VALUE *argv, VALUE self) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	VALUE klass, callback;
+ 	rb_scan_args(argc, argv, "1&", &klass, &callback);
+
+ 	st_data_t record_data;
+ 	if (st_lookup(capture->tracked_classes, (st_data_t)klass, &record_data)) {
+ 		struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)record_data;
+ 		RB_OBJ_WRITE(self, &record->callback, callback);
+ 	} else {
+ 		struct Memory_Profiler_Capture_Allocations *record = ALLOC(struct Memory_Profiler_Capture_Allocations);
+ 		record->callback = callback; // Initial assignment, no write barrier needed
+ 		record->new_count = 0;
+ 		record->free_count = 0;
+ 		record->object_states = NULL;
+ 		st_insert(capture->tracked_classes, (st_data_t)klass, (st_data_t)record);
+ 		// Now inform GC about the callback reference
+ 		if (!NIL_P(callback)) {
+ 			RB_OBJ_WRITTEN(self, Qnil, callback);
+ 		}
+ 	}
+
+ 	return self;
+ }
+
+ // Stop tracking a class
+ static VALUE Memory_Profiler_Capture_untrack(VALUE self, VALUE klass) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	st_data_t record_data;
+ 	if (st_delete(capture->tracked_classes, (st_data_t *)&klass, &record_data)) {
+ 		struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)record_data;
+ 		xfree(record);
+ 	}
+
+ 	return self;
+ }
+
+ // Check if tracking a class
+ static VALUE Memory_Profiler_Capture_tracking_p(VALUE self, VALUE klass) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	return st_lookup(capture->tracked_classes, (st_data_t)klass, NULL) ? Qtrue : Qfalse;
+ }
+
+
+ // Get count of live objects for a specific class (O(1) lookup!)
+ static VALUE Memory_Profiler_Capture_count_for(VALUE self, VALUE klass) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	st_data_t record_data;
+ 	if (st_lookup(capture->tracked_classes, (st_data_t)klass, &record_data)) {
+ 		struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)record_data;
+ 		// Return net live count (new_count - free_count)
+ 		// Handle case where more objects freed than allocated (allocated before tracking started)
+ 		if (record->free_count > record->new_count) {
+ 			return INT2FIX(0); // Can't have negative live count
+ 		}
+ 		size_t live_count = record->new_count - record->free_count;
+ 		return SIZET2NUM(live_count);
+ 	}
+
+ 	return INT2FIX(0);
+ }
+
+ // Iterator to free and reset each class record
+ static int Memory_Profiler_Capture_reset_class(st_data_t key, st_data_t value, st_data_t arg) {
+ 	struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)value;
+ 	record->new_count = 0; // Reset allocation count
+ 	record->free_count = 0; // Reset free count
+ 	record->callback = Qnil; // Clear callback
+
+ 	// Clear object states
+ 	if (record->object_states) {
+ 		st_free_table(record->object_states);
+ 		record->object_states = NULL;
+ 	}
+
+ 	return ST_CONTINUE;
+ }
+
+ // Clear all allocation tracking (resets all counts to 0)
+ static VALUE Memory_Profiler_Capture_clear(VALUE self) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	// Reset counts and clear callbacks/object states; the records themselves are kept
+ 	st_foreach(capture->tracked_classes, Memory_Profiler_Capture_reset_class, 0);
+
+ 	return self;
+ }
+
+ // TypedData for Allocations wrapper - just wraps the record pointer
+ static const rb_data_type_t Memory_Profiler_Allocations_type = {
+ 	"Memory::Profiler::Allocations",
+ 	{NULL, NULL, NULL}, // No mark/free needed - record is owned by Capture
+ 	0, 0, 0
+ };
+
+ // Wrap an allocations record
+ static VALUE Memory_Profiler_Allocations_wrap(struct Memory_Profiler_Capture_Allocations *record) {
+ 	return TypedData_Wrap_Struct(Memory_Profiler_Allocations, &Memory_Profiler_Allocations_type, record);
+ }
+
+ // Get allocations record from wrapper
+ static struct Memory_Profiler_Capture_Allocations* Memory_Profiler_Allocations_get(VALUE self) {
+ 	struct Memory_Profiler_Capture_Allocations *record;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture_Allocations, &Memory_Profiler_Allocations_type, record);
+ 	return record;
+ }
+
+ // Allocations#new_count
+ static VALUE Memory_Profiler_Allocations_new_count(VALUE self) {
+ 	struct Memory_Profiler_Capture_Allocations *record = Memory_Profiler_Allocations_get(self);
+ 	return SIZET2NUM(record->new_count);
+ }
+
+ // Allocations#free_count
+ static VALUE Memory_Profiler_Allocations_free_count(VALUE self) {
+ 	struct Memory_Profiler_Capture_Allocations *record = Memory_Profiler_Allocations_get(self);
+ 	return SIZET2NUM(record->free_count);
+ }
+
+ // Allocations#retained_count
+ static VALUE Memory_Profiler_Allocations_retained_count(VALUE self) {
+ 	struct Memory_Profiler_Capture_Allocations *record = Memory_Profiler_Allocations_get(self);
+ 	// Handle underflow when free_count > new_count
+ 	size_t retained = record->free_count > record->new_count ? 0 : record->new_count - record->free_count;
+ 	return SIZET2NUM(retained);
+ }
+
+ // Allocations#track { |klass, event, state| ... }
+ static VALUE Memory_Profiler_Allocations_track(int argc, VALUE *argv, VALUE self) {
+ 	struct Memory_Profiler_Capture_Allocations *record = Memory_Profiler_Allocations_get(self);
+
+ 	VALUE callback;
+ 	rb_scan_args(argc, argv, "&", &callback);
+
+ 	// Use write barrier - self (Allocations wrapper) keeps Capture alive, which keeps callback alive
+ 	RB_OBJ_WRITE(self, &record->callback, callback);
+
+ 	return self;
+ }
+
+ // Iterator callback for each
+ static int Memory_Profiler_Capture_each_allocation(st_data_t key, st_data_t value, st_data_t arg) {
+ 	VALUE klass = (VALUE)key;
+ 	struct Memory_Profiler_Capture_Allocations *record = (struct Memory_Profiler_Capture_Allocations *)value;
+
+ 	// Wrap the allocations record
+ 	VALUE allocations = Memory_Profiler_Allocations_wrap(record);
+
+ 	// Yield class and allocations wrapper
+ 	rb_yield_values(2, klass, allocations);
+
+ 	return ST_CONTINUE;
+ }
+
+ // Iterate over all tracked classes with their allocation data
+ static VALUE Memory_Profiler_Capture_each(VALUE self) {
+ 	struct Memory_Profiler_Capture *capture;
+ 	TypedData_Get_Struct(self, struct Memory_Profiler_Capture, &Memory_Profiler_Capture_type, capture);
+
+ 	RETURN_ENUMERATOR(self, 0, 0);
+
+ 	st_foreach(capture->tracked_classes, Memory_Profiler_Capture_each_allocation, 0);
+
+ 	return self;
+ }
+
+ void Init_Memory_Profiler_Capture(VALUE Memory_Profiler)
+ {
+ 	// Initialize event symbols
+ 	sym_newobj = ID2SYM(rb_intern("newobj"));
+ 	sym_freeobj = ID2SYM(rb_intern("freeobj"));
+ 	rb_gc_register_mark_object(sym_newobj);
+ 	rb_gc_register_mark_object(sym_freeobj);
+
+ 	Memory_Profiler_Capture = rb_define_class_under(Memory_Profiler, "Capture", rb_cObject);
+ 	rb_define_alloc_func(Memory_Profiler_Capture, Memory_Profiler_Capture_alloc);
+
+ 	rb_define_method(Memory_Profiler_Capture, "initialize", Memory_Profiler_Capture_initialize, 0);
+ 	rb_define_method(Memory_Profiler_Capture, "start", Memory_Profiler_Capture_start, 0);
+ 	rb_define_method(Memory_Profiler_Capture, "stop", Memory_Profiler_Capture_stop, 0);
+ 	rb_define_method(Memory_Profiler_Capture, "track", Memory_Profiler_Capture_track, -1); // -1 to accept block
+ 	rb_define_method(Memory_Profiler_Capture, "untrack", Memory_Profiler_Capture_untrack, 1);
+ 	rb_define_method(Memory_Profiler_Capture, "tracking?", Memory_Profiler_Capture_tracking_p, 1);
+ 	rb_define_method(Memory_Profiler_Capture, "count_for", Memory_Profiler_Capture_count_for, 1);
+ 	rb_define_method(Memory_Profiler_Capture, "each", Memory_Profiler_Capture_each, 0);
+ 	rb_define_method(Memory_Profiler_Capture, "clear", Memory_Profiler_Capture_clear, 0);
+
+ 	// Allocations class - wraps allocation data for a specific class
+ 	Memory_Profiler_Allocations = rb_define_class_under(Memory_Profiler, "Allocations", rb_cObject);
+
+ 	rb_define_method(Memory_Profiler_Allocations, "new_count", Memory_Profiler_Allocations_new_count, 0);
+ 	rb_define_method(Memory_Profiler_Allocations, "free_count", Memory_Profiler_Allocations_free_count, 0);
+ 	rb_define_method(Memory_Profiler_Allocations, "retained_count", Memory_Profiler_Allocations_retained_count, 0);
+ 	rb_define_method(Memory_Profiler_Allocations, "track", Memory_Profiler_Allocations_track, -1); // -1 to accept block
+ }
+
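
For orientation, a minimal Ruby usage sketch of the Capture API registered in Init_Memory_Profiler_Capture above. This is an editorial sketch, not part of the diff content: the require path is an assumption, while the method names, the callback signature (klass, event, state), and the counting semantics are taken from the code above.

    require "memory/profiler" # assumed entry point for this gem

    capture = Memory::Profiler::Capture.new

    # Count-only tracking: per-class totals with O(1) count_for lookups.
    capture.track(String)

    # Detailed tracking: the block is the callback invoked from the NEWOBJ/FREEOBJ
    # handlers above. It should be fast, avoid allocating, and never raise or trigger GC.
    capture.track(Hash) do |klass, event, state|
      if event == :newobj
        :seen # the returned value is stored as per-object state
      else
        # event == :freeobj; state is whatever the :newobj call returned
      end
    end

    capture.start
    # ... run the workload being profiled ...
    capture.stop

    capture.count_for(String) # => live count (new_count - free_count), clamped at 0
    capture.each do |klass, allocations|
      puts [klass, allocations.new_count, allocations.free_count, allocations.retained_count].inspect
    end
    capture.clear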
@@ -0,0 +1,9 @@
+ // Released under the MIT License.
+ // Copyright, 2025, by Samuel Williams.
+
+ #pragma once
+
+ #include <ruby.h>
+
+ void Init_Memory_Profiler_Capture(VALUE Memory_Profiler);
+
@@ -0,0 +1,17 @@
+ // Released under the MIT License.
+ // Copyright, 2025, by Samuel Williams.
+
+ #include "capture.h"
+
+ void Init_Memory_Profiler(void)
+ {
+ 	#ifdef HAVE_RB_EXT_RACTOR_SAFE
+ 	rb_ext_ractor_safe(true);
+ 	#endif
+
+ 	VALUE Memory = rb_const_get(rb_cObject, rb_intern("Memory"));
+ 	VALUE Memory_Profiler = rb_define_module_under(Memory, "Profiler");
+
+ 	Init_Memory_Profiler_Capture(Memory_Profiler);
+ }
+
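
One load-order implication of Init_Memory_Profiler above: rb_const_get(rb_cObject, rb_intern("Memory")) raises NameError unless the Memory constant already exists, so the gem's Ruby side presumably defines the module before requiring the native extension. A hypothetical sketch (the file name and require path are assumptions, not taken from this diff):

    # Hypothetical lib/memory/profiler.rb, sketching the assumed load order.
    module Memory
      module Profiler
      end
    end

    require "memory/profiler/profiler" # assumed native extension require path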