kanayago 0.1.1 → 0.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (77)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +12 -0
  3. data/.ruby-version +1 -0
  4. data/README.md +20 -29
  5. data/Rakefile +43 -96
  6. data/ext/kanayago/extconf.rb +6 -0
  7. data/ext/kanayago/id.h +12 -5
  8. data/ext/kanayago/id_table.h +15 -0
  9. data/ext/kanayago/include/ruby/st.h +199 -0
  10. data/ext/kanayago/internal/array.h +3 -0
  11. data/ext/kanayago/internal/basic_operators.h +1 -0
  12. data/ext/kanayago/internal/bignum.h +1 -0
  13. data/ext/kanayago/internal/bits.h +82 -0
  14. data/ext/kanayago/internal/encoding.h +4 -1
  15. data/ext/kanayago/internal/error.h +33 -0
  16. data/ext/kanayago/internal/fixnum.h +1 -0
  17. data/ext/kanayago/internal/gc.h +47 -11
  18. data/ext/kanayago/internal/hash.h +3 -0
  19. data/ext/kanayago/internal/imemo.h +93 -32
  20. data/ext/kanayago/internal/io.h +30 -7
  21. data/ext/kanayago/internal/namespace.h +81 -0
  22. data/ext/kanayago/internal/numeric.h +1 -0
  23. data/ext/kanayago/internal/parse.h +17 -3
  24. data/ext/kanayago/internal/re.h +7 -2
  25. data/ext/kanayago/internal/sanitizers.h +88 -39
  26. data/ext/kanayago/internal/set_table.h +70 -0
  27. data/ext/kanayago/internal/string.h +33 -16
  28. data/ext/kanayago/internal/symbol.h +4 -3
  29. data/ext/kanayago/internal/thread.h +42 -9
  30. data/ext/kanayago/internal/variable.h +13 -11
  31. data/ext/kanayago/internal/vm.h +4 -5
  32. data/ext/kanayago/internal.h +0 -3
  33. data/ext/kanayago/kanayago.c +554 -235
  34. data/ext/kanayago/kanayago.h +5 -0
  35. data/ext/kanayago/literal_node.c +343 -0
  36. data/ext/kanayago/literal_node.h +30 -0
  37. data/ext/kanayago/method.h +18 -2
  38. data/ext/kanayago/node.c +7 -1
  39. data/ext/kanayago/node.h +14 -3
  40. data/ext/kanayago/parse.c +7602 -7156
  41. data/ext/kanayago/parse.h +39 -39
  42. data/ext/kanayago/parser_st.c +2 -1
  43. data/ext/kanayago/pattern_node.c +78 -0
  44. data/ext/kanayago/pattern_node.h +13 -0
  45. data/ext/kanayago/ruby_atomic.h +43 -0
  46. data/ext/kanayago/ruby_parser.c +7 -35
  47. data/ext/kanayago/rubyparser.h +83 -80
  48. data/ext/kanayago/scope_node.c +34 -0
  49. data/ext/kanayago/scope_node.h +8 -0
  50. data/ext/kanayago/shape.h +321 -111
  51. data/ext/kanayago/st.c +905 -21
  52. data/ext/kanayago/statement_node.c +795 -0
  53. data/ext/kanayago/statement_node.h +66 -0
  54. data/ext/kanayago/string_node.c +192 -0
  55. data/ext/kanayago/string_node.h +19 -0
  56. data/ext/kanayago/symbol.h +2 -9
  57. data/ext/kanayago/thread_pthread.h +10 -3
  58. data/ext/kanayago/universal_parser.c +1 -20
  59. data/ext/kanayago/variable_node.c +72 -0
  60. data/ext/kanayago/variable_node.h +12 -0
  61. data/ext/kanayago/vm_core.h +205 -71
  62. data/lib/kanayago/literal_node.rb +87 -0
  63. data/lib/kanayago/pattern_node.rb +19 -0
  64. data/lib/kanayago/statement_node.rb +222 -0
  65. data/lib/kanayago/string_node.rb +43 -0
  66. data/lib/kanayago/variable_node.rb +23 -0
  67. data/lib/kanayago/version.rb +1 -1
  68. data/lib/kanayago.rb +22 -0
  69. data/patch/3.4/copy_target.rb +78 -0
  70. data/patch/3.4/kanayago.patch +162 -0
  71. data/patch/head/copy_target.rb +84 -0
  72. data/patch/head/kanayago.patch +162 -0
  73. data/sample/minitest_generator.rb +266 -0
  74. data/sample/test_generator.rb +272 -0
  75. data/typeprof.conf.json +9 -0
  76. metadata +32 -4
  77. data/ext/kanayago/parse.tmp.y +0 -16145
data/ext/kanayago/shape.h CHANGED
@@ -3,60 +3,94 @@
 
 #include "internal/gc.h"
 
-#if (SIZEOF_UINT64_T <= SIZEOF_VALUE)
-
-#define SIZEOF_SHAPE_T 4
-#define SHAPE_IN_BASIC_FLAGS 1
-typedef uint32_t attr_index_t;
+typedef uint16_t attr_index_t;
 typedef uint32_t shape_id_t;
-# define SHAPE_ID_NUM_BITS 32
+#define SHAPE_ID_NUM_BITS 32
+#define SHAPE_ID_OFFSET_NUM_BITS 19
 
-#else
+STATIC_ASSERT(shape_id_num_bits, SHAPE_ID_NUM_BITS == sizeof(shape_id_t) * CHAR_BIT);
 
-#define SIZEOF_SHAPE_T 2
-#define SHAPE_IN_BASIC_FLAGS 0
-typedef uint16_t attr_index_t;
-typedef uint16_t shape_id_t;
-# define SHAPE_ID_NUM_BITS 16
+#define SHAPE_BUFFER_SIZE (1 << SHAPE_ID_OFFSET_NUM_BITS)
+#define SHAPE_ID_OFFSET_MASK (SHAPE_BUFFER_SIZE - 1)
 
-#endif
+#define SHAPE_ID_HEAP_INDEX_BITS 3
+#define SHAPE_ID_HEAP_INDEX_MAX ((1 << SHAPE_ID_HEAP_INDEX_BITS) - 1)
 
-typedef uint32_t redblack_id_t;
+#define SHAPE_ID_FL_USHIFT (SHAPE_ID_OFFSET_NUM_BITS + SHAPE_ID_HEAP_INDEX_BITS)
+#define SHAPE_ID_HEAP_INDEX_OFFSET SHAPE_ID_FL_USHIFT
+
+// shape_id_t bits:
+// 0-18 SHAPE_ID_OFFSET_MASK
+//      index in rb_shape_tree.shape_list. Allow to access `rb_shape_t *`.
+// 19-21 SHAPE_ID_HEAP_INDEX_MASK
+//      index in rb_shape_tree.capacities. Allow to access slot size.
+//      Always 0 except for T_OBJECT.
+// 22 SHAPE_ID_FL_FROZEN
+//      Whether the object is frozen or not.
+// 23 SHAPE_ID_FL_HAS_OBJECT_ID
+//      Whether the object has an `SHAPE_OBJ_ID` transition.
+// 24 SHAPE_ID_FL_TOO_COMPLEX
+//      The object is backed by a `st_table`.
+
+enum shape_id_fl_type {
+#define RBIMPL_SHAPE_ID_FL(n) (1<<(SHAPE_ID_FL_USHIFT+n))
 
-#define MAX_IVARS (attr_index_t)(-1)
+    SHAPE_ID_HEAP_INDEX_MASK = RBIMPL_SHAPE_ID_FL(0) | RBIMPL_SHAPE_ID_FL(1) | RBIMPL_SHAPE_ID_FL(2),
 
-# define SHAPE_MASK (((uintptr_t)1 << SHAPE_ID_NUM_BITS) - 1)
-# define SHAPE_FLAG_MASK (((VALUE)-1) >> SHAPE_ID_NUM_BITS)
+    SHAPE_ID_FL_FROZEN = RBIMPL_SHAPE_ID_FL(3),
+    SHAPE_ID_FL_HAS_OBJECT_ID = RBIMPL_SHAPE_ID_FL(4),
+    SHAPE_ID_FL_TOO_COMPLEX = RBIMPL_SHAPE_ID_FL(5),
 
-# define SHAPE_FLAG_SHIFT ((SIZEOF_VALUE * 8) - SHAPE_ID_NUM_BITS)
+    SHAPE_ID_FL_NON_CANONICAL_MASK = SHAPE_ID_FL_FROZEN | SHAPE_ID_FL_HAS_OBJECT_ID,
+    SHAPE_ID_FLAGS_MASK = SHAPE_ID_HEAP_INDEX_MASK | SHAPE_ID_FL_NON_CANONICAL_MASK | SHAPE_ID_FL_TOO_COMPLEX,
+
+#undef RBIMPL_SHAPE_ID_FL
+};
+
+// This mask allows to check if a shape_id contains any ivar.
+// It relies on ROOT_SHAPE_WITH_OBJ_ID==1.
+enum shape_id_mask {
+    SHAPE_ID_HAS_IVAR_MASK = SHAPE_ID_FL_TOO_COMPLEX | (SHAPE_ID_OFFSET_MASK - 1),
+};
+
+// The interpreter doesn't care about frozen status or slot size when reading ivars.
+// So we normalize shape_id by clearing these bits to improve cache hits.
+// JITs however might care about it.
+#define SHAPE_ID_READ_ONLY_MASK (~(SHAPE_ID_FL_FROZEN | SHAPE_ID_HEAP_INDEX_MASK))
+
+typedef uint32_t redblack_id_t;
 
-# define SHAPE_MAX_VARIATIONS 8
+#define SHAPE_MAX_FIELDS (attr_index_t)(-1)
+#define SHAPE_FLAG_SHIFT ((SIZEOF_VALUE * CHAR_BIT) - SHAPE_ID_NUM_BITS)
+#define SHAPE_FLAG_MASK (((VALUE)-1) >> SHAPE_ID_NUM_BITS)
 
-# define INVALID_SHAPE_ID SHAPE_MASK
-# define ROOT_SHAPE_ID 0x0
+#define SHAPE_MAX_VARIATIONS 8
 
-# define SPECIAL_CONST_SHAPE_ID (ROOT_SHAPE_ID + 1)
-# define OBJ_TOO_COMPLEX_SHAPE_ID (SPECIAL_CONST_SHAPE_ID + 1)
-# define FIRST_T_OBJECT_SHAPE_ID (OBJ_TOO_COMPLEX_SHAPE_ID + 1)
+#define INVALID_SHAPE_ID ((shape_id_t)-1)
+#define ATTR_INDEX_NOT_SET ((attr_index_t)-1)
+
+#define ROOT_SHAPE_ID 0x0
+#define ROOT_SHAPE_WITH_OBJ_ID 0x1
+#define ROOT_TOO_COMPLEX_SHAPE_ID (ROOT_SHAPE_ID | SHAPE_ID_FL_TOO_COMPLEX)
+#define ROOT_TOO_COMPLEX_WITH_OBJ_ID (ROOT_SHAPE_WITH_OBJ_ID | SHAPE_ID_FL_TOO_COMPLEX | SHAPE_ID_FL_HAS_OBJECT_ID)
 
 typedef struct redblack_node redblack_node_t;
 
 struct rb_shape {
-    struct rb_id_table * edges; // id_table from ID (ivar) to next shape
+    VALUE edges; // id_table from ID (ivar) to next shape
     ID edge_name; // ID (ivar) for transition from parent to rb_shape
-    attr_index_t next_iv_index;
-    uint32_t capacity; // Total capacity of the object with this shape
-    uint8_t type;
-    uint8_t size_pool_index;
+    redblack_node_t *ancestor_index;
     shape_id_t parent_id;
-    redblack_node_t * ancestor_index;
+    attr_index_t next_field_index; // Fields are either ivars or internal properties like `object_id`
+    attr_index_t capacity; // Total capacity of the object with this shape
+    uint8_t type;
 };
 
 typedef struct rb_shape rb_shape_t;
 
 struct redblack_node {
     ID key;
-    rb_shape_t * value;
+    rb_shape_t *value;
     redblack_id_t l;
     redblack_id_t r;
 };
@@ -64,171 +98,347 @@ struct redblack_node {
 enum shape_type {
     SHAPE_ROOT,
     SHAPE_IVAR,
-    SHAPE_FROZEN,
-    SHAPE_T_OBJECT,
-    SHAPE_OBJ_TOO_COMPLEX,
+    SHAPE_OBJ_ID,
+};
+
+enum shape_flags {
+    SHAPE_FL_FROZEN = 1 << 0,
+    SHAPE_FL_HAS_OBJECT_ID = 1 << 1,
+    SHAPE_FL_TOO_COMPLEX = 1 << 2,
+
+    SHAPE_FL_NON_CANONICAL_MASK = SHAPE_FL_FROZEN | SHAPE_FL_HAS_OBJECT_ID,
 };
 
 typedef struct {
     /* object shapes */
     rb_shape_t *shape_list;
     rb_shape_t *root_shape;
-    shape_id_t next_shape_id;
+    const attr_index_t *capacities;
+    rb_atomic_t next_shape_id;
 
     redblack_node_t *shape_cache;
     unsigned int cache_size;
 } rb_shape_tree_t;
-RUBY_EXTERN rb_shape_tree_t *rb_shape_tree_ptr;
 
-static inline rb_shape_tree_t *
-rb_current_shape_tree(void)
+RUBY_SYMBOL_EXPORT_BEGIN
+RUBY_EXTERN rb_shape_tree_t rb_shape_tree;
+RUBY_SYMBOL_EXPORT_END
+
+static inline shape_id_t
+rb_shapes_count(void)
 {
-    return rb_shape_tree_ptr;
+    return (shape_id_t)RUBY_ATOMIC_LOAD(rb_shape_tree.next_shape_id);
 }
-#define GET_SHAPE_TREE() rb_current_shape_tree()
+
+union rb_attr_index_cache {
+    uint64_t pack;
+    struct {
+        shape_id_t shape_id;
+        attr_index_t index;
+    } unpack;
+};
 
 static inline shape_id_t
-get_shape_id_from_flags(VALUE obj)
+RBASIC_SHAPE_ID(VALUE obj)
 {
     RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
-    return (shape_id_t)(SHAPE_MASK & ((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT));
+    RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO) || IMEMO_TYPE_P(obj, imemo_fields));
+#if RBASIC_SHAPE_ID_FIELD
+    return (shape_id_t)((RBASIC(obj)->shape_id));
+#else
+    return (shape_id_t)((RBASIC(obj)->flags) >> SHAPE_FLAG_SHIFT);
+#endif
+}
+
+// Same as RBASIC_SHAPE_ID but with flags that have no impact
+// on reads removed. e.g. Remove FL_FROZEN.
+static inline shape_id_t
+RBASIC_SHAPE_ID_FOR_READ(VALUE obj)
+{
+    return RBASIC_SHAPE_ID(obj) & SHAPE_ID_READ_ONLY_MASK;
 }
 
+#if RUBY_DEBUG
+bool rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id);
+#endif
+
 static inline void
-set_shape_id_in_flags(VALUE obj, shape_id_t shape_id)
+RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
 {
-    // Ractors are occupying the upper 32 bits of flags, but only in debug mode
+    RUBY_ASSERT(!RB_SPECIAL_CONST_P(obj));
+    RUBY_ASSERT(!RB_TYPE_P(obj, T_IMEMO) || IMEMO_TYPE_P(obj, imemo_fields));
+#if RBASIC_SHAPE_ID_FIELD
+    RBASIC(obj)->shape_id = (VALUE)shape_id;
+#else
     // Object shapes are occupying top bits
     RBASIC(obj)->flags &= SHAPE_FLAG_MASK;
     RBASIC(obj)->flags |= ((VALUE)(shape_id) << SHAPE_FLAG_SHIFT);
+#endif
+    RUBY_ASSERT(rb_shape_verify_consistency(obj, shape_id));
 }
 
+void rb_set_namespaced_class_shape_id(VALUE obj, shape_id_t shape_id);
 
-#if SHAPE_IN_BASIC_FLAGS
-static inline shape_id_t
-RBASIC_SHAPE_ID(VALUE obj)
+static inline void
+RB_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
 {
-    return get_shape_id_from_flags(obj);
+    switch (BUILTIN_TYPE(obj)) {
+      case T_CLASS:
+      case T_MODULE:
+        rb_set_namespaced_class_shape_id(obj, shape_id);
+        break;
+      default:
+        RBASIC_SET_SHAPE_ID(obj, shape_id);
+        break;
+    }
 }
 
-static inline void
-RBASIC_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+static inline rb_shape_t *
+RSHAPE(shape_id_t shape_id)
 {
-    set_shape_id_in_flags(obj, shape_id);
+    uint32_t offset = (shape_id & SHAPE_ID_OFFSET_MASK);
+    RUBY_ASSERT(offset != INVALID_SHAPE_ID);
+
+    return &rb_shape_tree.shape_list[offset];
 }
-#endif
 
-static inline shape_id_t
-ROBJECT_SHAPE_ID(VALUE obj)
+int32_t rb_shape_id_offset(void);
+
+RUBY_FUNC_EXPORTED shape_id_t rb_obj_shape_id(VALUE obj);
+shape_id_t rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id);
+bool rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value);
+bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint);
+bool rb_shape_find_ivar(shape_id_t shape_id, ID id, shape_id_t *ivar_shape);
+
+typedef int rb_shape_foreach_transition_callback(shape_id_t shape_id, void *data);
+bool rb_shape_foreach_field(shape_id_t shape_id, rb_shape_foreach_transition_callback func, void *data);
+
+shape_id_t rb_shape_transition_frozen(VALUE obj);
+shape_id_t rb_shape_transition_complex(VALUE obj);
+shape_id_t rb_shape_transition_remove_ivar(VALUE obj, ID id, shape_id_t *removed_shape_id);
+shape_id_t rb_shape_transition_add_ivar(VALUE obj, ID id);
+shape_id_t rb_shape_transition_add_ivar_no_warnings(VALUE obj, ID id);
+shape_id_t rb_shape_transition_object_id(VALUE obj);
+shape_id_t rb_shape_transition_heap(VALUE obj, size_t heap_index);
+shape_id_t rb_shape_object_id(shape_id_t original_shape_id);
+
+void rb_shape_free_all(void);
+
+shape_id_t rb_shape_rebuild(shape_id_t initial_shape_id, shape_id_t dest_shape_id);
+void rb_shape_copy_fields(VALUE dest, VALUE *dest_buf, shape_id_t dest_shape_id, VALUE *src_buf, shape_id_t src_shape_id);
+void rb_shape_copy_complex_ivars(VALUE dest, VALUE obj, shape_id_t src_shape_id, st_table *fields_table);
+
+static inline bool
+rb_shape_too_complex_p(shape_id_t shape_id)
 {
-    RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
-    return get_shape_id_from_flags(obj);
+    return shape_id & SHAPE_ID_FL_TOO_COMPLEX;
 }
 
-static inline void
-ROBJECT_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+static inline bool
+rb_shape_obj_too_complex_p(VALUE obj)
 {
-    RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
-    set_shape_id_in_flags(obj, shape_id);
+    return !RB_SPECIAL_CONST_P(obj) && rb_shape_too_complex_p(RBASIC_SHAPE_ID(obj));
+}
+
+static inline bool
+rb_shape_has_object_id(shape_id_t shape_id)
+{
+    return shape_id & SHAPE_ID_FL_HAS_OBJECT_ID;
+}
+
+static inline bool
+rb_shape_canonical_p(shape_id_t shape_id)
+{
+    return !(shape_id & SHAPE_ID_FL_NON_CANONICAL_MASK);
+}
+
+static inline uint8_t
+rb_shape_heap_index(shape_id_t shape_id)
+{
+    return (uint8_t)((shape_id & SHAPE_ID_HEAP_INDEX_MASK) >> SHAPE_ID_HEAP_INDEX_OFFSET);
 }
 
 static inline shape_id_t
-RCLASS_SHAPE_ID(VALUE obj)
+rb_shape_root(size_t heap_id)
 {
-    RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
-    return get_shape_id_from_flags(obj);
+    shape_id_t heap_index = (shape_id_t)(heap_id + 1);
+    shape_id_t heap_flags = heap_index << SHAPE_ID_HEAP_INDEX_OFFSET;
+
+    RUBY_ASSERT((heap_flags & SHAPE_ID_HEAP_INDEX_MASK) == heap_flags);
+    RUBY_ASSERT(rb_shape_heap_index(heap_flags) == heap_index);
+
+    return ROOT_SHAPE_ID | heap_flags;
 }
 
-static inline void
-RCLASS_SET_SHAPE_ID(VALUE obj, shape_id_t shape_id)
+static inline shape_id_t
+RSHAPE_PARENT_RAW_ID(shape_id_t shape_id)
 {
-    RUBY_ASSERT(RB_TYPE_P(obj, T_CLASS) || RB_TYPE_P(obj, T_MODULE));
-    set_shape_id_in_flags(obj, shape_id);
+    return RSHAPE(shape_id)->parent_id;
 }
 
-rb_shape_t * rb_shape_get_root_shape(void);
-int32_t rb_shape_id_offset(void);
+static inline bool
+RSHAPE_DIRECT_CHILD_P(shape_id_t parent_id, shape_id_t child_id)
+{
+    return (parent_id & SHAPE_ID_FLAGS_MASK) == (child_id & SHAPE_ID_FLAGS_MASK) &&
+           RSHAPE(child_id)->parent_id == (parent_id & SHAPE_ID_OFFSET_MASK);
+}
+
+static inline enum shape_type
+RSHAPE_TYPE(shape_id_t shape_id)
+{
+    return RSHAPE(shape_id)->type;
+}
+
+static inline bool
+RSHAPE_TYPE_P(shape_id_t shape_id, enum shape_type type)
+{
+    return RSHAPE_TYPE(shape_id) == type;
+}
+
+static inline attr_index_t
+RSHAPE_EMBEDDED_CAPACITY(shape_id_t shape_id)
+{
+    uint8_t heap_index = rb_shape_heap_index(shape_id);
+    if (heap_index) {
+        return rb_shape_tree.capacities[heap_index - 1];
+    }
+    return 0;
+}
+
+static inline attr_index_t
+RSHAPE_CAPACITY(shape_id_t shape_id)
+{
+    attr_index_t embedded_capacity = RSHAPE_EMBEDDED_CAPACITY(shape_id);
 
-rb_shape_t * rb_shape_get_parent(rb_shape_t * shape);
+    if (embedded_capacity > RSHAPE(shape_id)->capacity) {
+        return embedded_capacity;
+    }
+    else {
+        return RSHAPE(shape_id)->capacity;
+    }
+}
 
-RUBY_FUNC_EXPORTED rb_shape_t *rb_shape_get_shape_by_id(shape_id_t shape_id);
-RUBY_FUNC_EXPORTED shape_id_t rb_shape_get_shape_id(VALUE obj);
-rb_shape_t * rb_shape_get_next_iv_shape(rb_shape_t * shape, ID id);
-bool rb_shape_get_iv_index(rb_shape_t * shape, ID id, attr_index_t * value);
-bool rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t * value, shape_id_t *shape_id_hint);
-RUBY_FUNC_EXPORTED bool rb_shape_obj_too_complex(VALUE obj);
+static inline attr_index_t
+RSHAPE_LEN(shape_id_t shape_id)
+{
+    return RSHAPE(shape_id)->next_field_index;
+}
 
-void rb_shape_set_shape(VALUE obj, rb_shape_t* shape);
-rb_shape_t* rb_shape_get_shape(VALUE obj);
-int rb_shape_frozen_shape_p(rb_shape_t* shape);
-rb_shape_t* rb_shape_transition_shape_frozen(VALUE obj);
-bool rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape, VALUE * removed);
-rb_shape_t* rb_shape_get_next(rb_shape_t* shape, VALUE obj, ID id);
-rb_shape_t* rb_shape_get_next_no_warnings(rb_shape_t* shape, VALUE obj, ID id);
+static inline attr_index_t
+RSHAPE_INDEX(shape_id_t shape_id)
+{
+    RUBY_ASSERT(RSHAPE_LEN(shape_id) > 0);
+    return RSHAPE_LEN(shape_id) - 1;
+}
 
-rb_shape_t * rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape);
+static inline ID
+RSHAPE_EDGE_NAME(shape_id_t shape_id)
+{
+    return RSHAPE(shape_id)->edge_name;
+}
 
 static inline uint32_t
-ROBJECT_IV_CAPACITY(VALUE obj)
+ROBJECT_FIELDS_CAPACITY(VALUE obj)
 {
     RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
     // Asking for capacity doesn't make sense when the object is using
     // a hash table for storing instance variables
-    RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
-    return rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->capacity;
+    RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
+    return RSHAPE_CAPACITY(RBASIC_SHAPE_ID(obj));
 }
 
 static inline st_table *
-ROBJECT_IV_HASH(VALUE obj)
+ROBJECT_FIELDS_HASH(VALUE obj)
 {
     RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
-    RUBY_ASSERT(rb_shape_obj_too_complex(obj));
-    return (st_table *)ROBJECT(obj)->as.heap.ivptr;
+    RUBY_ASSERT(rb_shape_obj_too_complex_p(obj));
+    RUBY_ASSERT(FL_TEST_RAW(obj, ROBJECT_HEAP));
+
+    return (st_table *)ROBJECT(obj)->as.heap.fields;
 }
 
 static inline void
-ROBJECT_SET_IV_HASH(VALUE obj, const st_table *tbl)
+ROBJECT_SET_FIELDS_HASH(VALUE obj, const st_table *tbl)
 {
     RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
-    RUBY_ASSERT(rb_shape_obj_too_complex(obj));
-    ROBJECT(obj)->as.heap.ivptr = (VALUE *)tbl;
-}
+    RUBY_ASSERT(rb_shape_obj_too_complex_p(obj));
+    RUBY_ASSERT(FL_TEST_RAW(obj, ROBJECT_HEAP));
 
-size_t rb_id_table_size(const struct rb_id_table *tbl);
+    ROBJECT(obj)->as.heap.fields = (VALUE *)tbl;
+}
 
 static inline uint32_t
-ROBJECT_IV_COUNT(VALUE obj)
+ROBJECT_FIELDS_COUNT(VALUE obj)
 {
-    if (rb_shape_obj_too_complex(obj)) {
-        return (uint32_t)rb_st_table_size(ROBJECT_IV_HASH(obj));
+    if (rb_shape_obj_too_complex_p(obj)) {
+        return (uint32_t)rb_st_table_size(ROBJECT_FIELDS_HASH(obj));
     }
     else {
         RBIMPL_ASSERT_TYPE(obj, RUBY_T_OBJECT);
-        RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
-        return rb_shape_get_shape_by_id(ROBJECT_SHAPE_ID(obj))->next_iv_index;
+        RUBY_ASSERT(!rb_shape_obj_too_complex_p(obj));
+        return RSHAPE(RBASIC_SHAPE_ID(obj))->next_field_index;
     }
 }
 
 static inline uint32_t
-RBASIC_IV_COUNT(VALUE obj)
+RBASIC_FIELDS_COUNT(VALUE obj)
 {
-    return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj))->next_iv_index;
+    return RSHAPE(rb_obj_shape_id(obj))->next_field_index;
 }
 
-rb_shape_t *rb_shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *orig_shape);
+static inline bool
+rb_shape_obj_has_id(VALUE obj)
+{
+    return rb_shape_has_object_id(RBASIC_SHAPE_ID(obj));
+}
 
-bool rb_shape_set_shape_id(VALUE obj, shape_id_t shape_id);
+static inline bool
+rb_shape_has_ivars(shape_id_t shape_id)
+{
+    return shape_id & SHAPE_ID_HAS_IVAR_MASK;
+}
+
+static inline bool
+rb_shape_obj_has_ivars(VALUE obj)
+{
+    return rb_shape_has_ivars(RBASIC_SHAPE_ID(obj));
+}
+
+static inline bool
+rb_shape_has_fields(shape_id_t shape_id)
+{
+    return shape_id & (SHAPE_ID_OFFSET_MASK | SHAPE_ID_FL_TOO_COMPLEX);
+}
+
+static inline bool
+rb_shape_obj_has_fields(VALUE obj)
+{
+    return rb_shape_has_fields(RBASIC_SHAPE_ID(obj));
+}
 
-VALUE rb_obj_debug_shape(VALUE self, VALUE obj);
+static inline bool
+rb_obj_exivar_p(VALUE obj)
+{
+    switch (TYPE(obj)) {
+      case T_NONE:
+      case T_OBJECT:
+      case T_CLASS:
+      case T_MODULE:
+      case T_IMEMO:
+        return false;
+      default:
+        break;
+    }
+    return rb_shape_obj_has_fields(obj);
+}
 
 // For ext/objspace
 RUBY_SYMBOL_EXPORT_BEGIN
-typedef void each_shape_callback(rb_shape_t * shape, void *data);
-void rb_shape_each_shape(each_shape_callback callback, void *data);
-size_t rb_shape_memsize(rb_shape_t *shape);
-size_t rb_shape_edges_count(rb_shape_t *shape);
-size_t rb_shape_depth(rb_shape_t *shape);
-shape_id_t rb_shape_id(rb_shape_t * shape);
+typedef void each_shape_callback(shape_id_t shape_id, void *data);
+void rb_shape_each_shape_id(each_shape_callback callback, void *data);
+size_t rb_shape_memsize(shape_id_t shape);
+size_t rb_shape_edges_count(shape_id_t shape_id);
+size_t rb_shape_depth(shape_id_t shape_id);
 RUBY_SYMBOL_EXPORT_END
 
 #endif
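
Note on reading the new shape.h: a single shape_id_t now packs the shape-list offset, a heap (slot size) index, and several flag bits. The following standalone C sketch is not part of the gem or of Ruby itself; it simply mirrors the constants defined in the vendored header above (SHAPE_ID_OFFSET_NUM_BITS, SHAPE_ID_HEAP_INDEX_BITS, and the RBIMPL_SHAPE_ID_FL(n) flag positions, locally renamed SHAPE_ID_FL) to show how such an ID decomposes into its parts.

#include <stdint.h>
#include <stdio.h>

typedef uint32_t shape_id_t;

/* Constants mirroring the vendored shape.h above. */
#define SHAPE_ID_OFFSET_NUM_BITS 19
#define SHAPE_ID_OFFSET_MASK ((1u << SHAPE_ID_OFFSET_NUM_BITS) - 1)
#define SHAPE_ID_HEAP_INDEX_BITS 3
#define SHAPE_ID_FL_USHIFT (SHAPE_ID_OFFSET_NUM_BITS + SHAPE_ID_HEAP_INDEX_BITS)
#define SHAPE_ID_HEAP_INDEX_OFFSET SHAPE_ID_FL_USHIFT
#define SHAPE_ID_FL(n) (1u << (SHAPE_ID_FL_USHIFT + (n)))
#define SHAPE_ID_HEAP_INDEX_MASK (SHAPE_ID_FL(0) | SHAPE_ID_FL(1) | SHAPE_ID_FL(2))
#define SHAPE_ID_FL_FROZEN SHAPE_ID_FL(3)
#define SHAPE_ID_FL_HAS_OBJECT_ID SHAPE_ID_FL(4)
#define SHAPE_ID_FL_TOO_COMPLEX SHAPE_ID_FL(5)

int main(void)
{
    /* A made-up shape_id: shape-list offset 5, heap index 2, frozen bit set. */
    shape_id_t shape_id = 5u
                        | (2u << SHAPE_ID_HEAP_INDEX_OFFSET)
                        | SHAPE_ID_FL_FROZEN;

    /* Offset selects an entry in rb_shape_tree.shape_list. */
    uint32_t offset = shape_id & SHAPE_ID_OFFSET_MASK;
    /* Heap index (when non-zero) selects an entry in rb_shape_tree.capacities. */
    uint32_t heap_index = (shape_id & SHAPE_ID_HEAP_INDEX_MASK) >> SHAPE_ID_HEAP_INDEX_OFFSET;

    printf("offset=%u heap_index=%u frozen=%d has_object_id=%d too_complex=%d\n",
           offset, heap_index,
           !!(shape_id & SHAPE_ID_FL_FROZEN),
           !!(shape_id & SHAPE_ID_FL_HAS_OBJECT_ID),
           !!(shape_id & SHAPE_ID_FL_TOO_COMPLEX));
    return 0;
}

The header's own helpers (RSHAPE(), rb_shape_heap_index(), rb_shape_root()) perform the same mask-and-shift operations against the real rb_shape_tree, and SHAPE_ID_READ_ONLY_MASK clears the frozen and heap-index bits so that inline caches keyed on shape_id hit more often during ivar reads.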