/* Ruby 3.5.0dev (2025-01-10 revision 5fab31b15e32622c4b71d1d347a41937e9f9c212)
 * shape.c (5fab31b15e32622c4b71d1d347a41937e9f9c212) */
1#include "vm_core.h"
2#include "vm_sync.h"
3#include "shape.h"
4#include "symbol.h"
5#include "id_table.h"
6#include "internal/class.h"
7#include "internal/error.h"
8#include "internal/gc.h"
9#include "internal/object.h"
10#include "internal/symbol.h"
11#include "internal/variable.h"
12#include "variable.h"
13#include <stdbool.h>
14
15#ifndef _WIN32
16#include <sys/mman.h>
17#endif
18
19#ifndef SHAPE_DEBUG
20#define SHAPE_DEBUG (VM_CHECK_MODE > 0)
21#endif
22
23#if SIZEOF_SHAPE_T == 4
24#if RUBY_DEBUG
25#define SHAPE_BUFFER_SIZE 0x8000
26#else
27#define SHAPE_BUFFER_SIZE 0x80000
28#endif
29#else
30#define SHAPE_BUFFER_SIZE 0x8000
31#endif
32
33#define REDBLACK_CACHE_SIZE (SHAPE_BUFFER_SIZE * 32)
34
35/* This depends on that the allocated memory by Ruby's allocator or
36 * mmap is not located at an odd address. */
37#define SINGLE_CHILD_TAG 0x1
38#define TAG_SINGLE_CHILD(x) (struct rb_id_table *)((uintptr_t)(x) | SINGLE_CHILD_TAG)
39#define SINGLE_CHILD_MASK (~((uintptr_t)SINGLE_CHILD_TAG))
40#define SINGLE_CHILD_P(x) ((uintptr_t)(x) & SINGLE_CHILD_TAG)
41#define SINGLE_CHILD(x) (rb_shape_t *)((uintptr_t)(x) & SINGLE_CHILD_MASK)
42#define ANCESTOR_CACHE_THRESHOLD 10
43#define MAX_SHAPE_ID (SHAPE_BUFFER_SIZE - 1)
44#define ANCESTOR_SEARCH_MAX_DEPTH 2
45
46static ID id_frozen;
47static ID id_t_object;
48
49#define LEAF 0
50#define BLACK 0x0
51#define RED 0x1
52
53static redblack_node_t *
54redblack_left(redblack_node_t * node)
55{
56 if (node->l == LEAF) {
57 return LEAF;
58 }
59 else {
60 RUBY_ASSERT(node->l < GET_SHAPE_TREE()->cache_size);
61 redblack_node_t * left = &GET_SHAPE_TREE()->shape_cache[node->l - 1];
62 return left;
63 }
64}
65
66static redblack_node_t *
67redblack_right(redblack_node_t * node)
68{
69 if (node->r == LEAF) {
70 return LEAF;
71 }
72 else {
73 RUBY_ASSERT(node->r < GET_SHAPE_TREE()->cache_size);
74 redblack_node_t * right = &GET_SHAPE_TREE()->shape_cache[node->r - 1];
75 return right;
76 }
77}
78
79static redblack_node_t *
80redblack_find(redblack_node_t * tree, ID key)
81{
82 if (tree == LEAF) {
83 return LEAF;
84 }
85 else {
86 RUBY_ASSERT(redblack_left(tree) == LEAF || redblack_left(tree)->key < tree->key);
87 RUBY_ASSERT(redblack_right(tree) == LEAF || redblack_right(tree)->key > tree->key);
88
89 if (tree->key == key) {
90 return tree;
91 }
92 else {
93 if (key < tree->key) {
94 return redblack_find(redblack_left(tree), key);
95 }
96 else {
97 return redblack_find(redblack_right(tree), key);
98 }
99 }
100 }
101}
102
// Extract the shape pointer stored in +node+.
static inline rb_shape_t *
redblack_value(redblack_node_t * node)
{
    // Color is stored in the bottom bit of the shape pointer
    // Mask away the bit so we get the actual pointer back
    return (rb_shape_t *)((uintptr_t)node->value & ~(uintptr_t)1);
}
110
111#ifdef HAVE_MMAP
112static inline char
113redblack_color(redblack_node_t * node)
114{
115 return node && ((uintptr_t)node->value & RED);
116}
117
118static inline bool
119redblack_red_p(redblack_node_t * node)
120{
121 return redblack_color(node) == RED;
122}
123
124static redblack_id_t
125redblack_id_for(redblack_node_t * node)
126{
127 RUBY_ASSERT(node || node == LEAF);
128 if (node == LEAF) {
129 return 0;
130 }
131 else {
132 redblack_node_t * redblack_nodes = GET_SHAPE_TREE()->shape_cache;
133 redblack_id_t id = (redblack_id_t)(node - redblack_nodes);
134 return id + 1;
135 }
136}
137
// Allocate a node from the preallocated cache and initialize it with
// +key+/+value+ and the given children. The color is packed into the low
// bit of the value pointer. Returns LEAF when the cache is exhausted, in
// which case the caller's tree simply stops growing.
static redblack_node_t *
redblack_new(char color, ID key, rb_shape_t * value, redblack_node_t * left, redblack_node_t * right)
{
    if (GET_SHAPE_TREE()->cache_size + 1 >= REDBLACK_CACHE_SIZE) {
        // We're out of cache, just quit
        return LEAF;
    }

    RUBY_ASSERT(left == LEAF || left->key < key);
    RUBY_ASSERT(right == LEAF || right->key > key);

    redblack_node_t * redblack_nodes = GET_SHAPE_TREE()->shape_cache;
    redblack_node_t * node = &redblack_nodes[(GET_SHAPE_TREE()->cache_size)++];
    node->key = key;
    node->value = (rb_shape_t *)((uintptr_t)value | color);
    node->l = redblack_id_for(left);
    node->r = redblack_id_for(right);
    return node;
}
157
// Okasaki-style rebalance for persistent red-black trees: when a BLACK
// node would end up with a RED child that itself has a RED child, rewrite
// the local group (three nodes, four subtrees) into a RED parent with two
// BLACK children. Each branch handles one of the four red-red
// configurations; any other configuration is already balanced.
static redblack_node_t *
redblack_balance(char color, ID key, rb_shape_t * value, redblack_node_t * left, redblack_node_t * right)
{
    if (color == BLACK) {
        ID new_key, new_left_key, new_right_key;
        rb_shape_t *new_value, *new_left_value, *new_right_value;
        redblack_node_t *new_left_left, *new_left_right, *new_right_left, *new_right_right;

        // left-left red violation
        if (redblack_red_p(left) && redblack_red_p(redblack_left(left))) {
            new_right_key = key;
            new_right_value = value;
            new_right_right = right;

            new_key = left->key;
            new_value = redblack_value(left);
            new_right_left = redblack_right(left);

            new_left_key = redblack_left(left)->key;
            new_left_value = redblack_value(redblack_left(left));

            new_left_left = redblack_left(redblack_left(left));
            new_left_right = redblack_right(redblack_left(left));
        }
        // left-right red violation
        else if (redblack_red_p(left) && redblack_red_p(redblack_right(left))) {
            new_right_key = key;
            new_right_value = value;
            new_right_right = right;

            new_left_key = left->key;
            new_left_value = redblack_value(left);
            new_left_left = redblack_left(left);

            new_key = redblack_right(left)->key;
            new_value = redblack_value(redblack_right(left));
            new_left_right = redblack_left(redblack_right(left));
            new_right_left = redblack_right(redblack_right(left));
        }
        // right-left red violation
        else if (redblack_red_p(right) && redblack_red_p(redblack_left(right))) {
            new_left_key = key;
            new_left_value = value;
            new_left_left = left;

            new_right_key = right->key;
            new_right_value = redblack_value(right);
            new_right_right = redblack_right(right);

            new_key = redblack_left(right)->key;
            new_value = redblack_value(redblack_left(right));
            new_left_right = redblack_left(redblack_left(right));
            new_right_left = redblack_right(redblack_left(right));
        }
        // right-right red violation
        else if (redblack_red_p(right) && redblack_red_p(redblack_right(right))) {
            new_left_key = key;
            new_left_value = value;
            new_left_left = left;

            new_key = right->key;
            new_value = redblack_value(right);
            new_left_right = redblack_left(right);

            new_right_key = redblack_right(right)->key;
            new_right_value = redblack_value(redblack_right(right));
            new_right_left = redblack_left(redblack_right(right));
            new_right_right = redblack_right(redblack_right(right));
        }
        else {
            // No violation: just build the node as requested.
            return redblack_new(color, key, value, left, right);
        }

        RUBY_ASSERT(new_left_key < new_key);
        RUBY_ASSERT(new_right_key > new_key);
        RUBY_ASSERT(new_left_left == LEAF || new_left_left->key < new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key > new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key < new_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key < new_right_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key > new_key);
        RUBY_ASSERT(new_right_right == LEAF || new_right_right->key > new_right_key);

        return redblack_new(
            RED, new_key, new_value,
            redblack_new(BLACK, new_left_key, new_left_value, new_left_left, new_left_right),
            redblack_new(BLACK, new_right_key, new_right_value, new_right_left, new_right_right));
    }

    return redblack_new(color, key, value, left, right);
}
244
// Persistent red-black insertion: new nodes start RED and each level is
// rebalanced on the way back up the recursion. Inserting a key that is
// already present returns the existing subtree unchanged.
static redblack_node_t *
redblack_insert_aux(redblack_node_t * tree, ID key, rb_shape_t * value)
{
    if (tree == LEAF) {
        return redblack_new(RED, key, value, LEAF, LEAF);
    }
    else {
        redblack_node_t *left, *right;
        if (key < tree->key) {
            left = redblack_insert_aux(redblack_left(tree), key, value);
            RUBY_ASSERT(left != LEAF);
            right = redblack_right(tree);
            RUBY_ASSERT(right == LEAF || right->key > tree->key);
        }
        else if (key > tree->key) {
            left = redblack_left(tree);
            RUBY_ASSERT(left == LEAF || left->key < tree->key);
            right = redblack_insert_aux(redblack_right(tree), key, value);
            RUBY_ASSERT(right != LEAF);
        }
        else {
            // Key already present; nothing to do.
            return tree;
        }

        return redblack_balance(
            redblack_color(tree),
            tree->key,
            redblack_value(tree),
            left,
            right
        );
    }
}
278
279static redblack_node_t *
280redblack_force_black(redblack_node_t * node)
281{
282 node->value = redblack_value(node);
283 return node;
284}
285
286static redblack_node_t *
287redblack_insert(redblack_node_t * tree, ID key, rb_shape_t * value)
288{
289 redblack_node_t * root = redblack_insert_aux(tree, key, value);
290
291 if (redblack_red_p(root)) {
292 return redblack_force_black(root);
293 }
294 else {
295 return root;
296 }
297}
298#endif
299
300rb_shape_tree_t *rb_shape_tree_ptr = NULL;
301
302/*
303 * Shape getters
304 */
306rb_shape_get_root_shape(void)
307{
308 return GET_SHAPE_TREE()->root_shape;
309}
310
311shape_id_t
312rb_shape_id(rb_shape_t * shape)
313{
314 return (shape_id_t)(shape - GET_SHAPE_TREE()->shape_list);
315}
316
317void
318rb_shape_each_shape(each_shape_callback callback, void *data)
319{
320 rb_shape_t *cursor = rb_shape_get_root_shape();
321 rb_shape_t *end = rb_shape_get_shape_by_id(GET_SHAPE_TREE()->next_shape_id);
322 while (cursor < end) {
323 callback(cursor, data);
324 cursor += 1;
325 }
326}
327
328RUBY_FUNC_EXPORTED rb_shape_t *
329rb_shape_get_shape_by_id(shape_id_t shape_id)
330{
331 RUBY_ASSERT(shape_id != INVALID_SHAPE_ID);
332
333 rb_shape_t *shape = &GET_SHAPE_TREE()->shape_list[shape_id];
334 return shape;
335}
336
338rb_shape_get_parent(rb_shape_t * shape)
339{
340 return rb_shape_get_shape_by_id(shape->parent_id);
341}
342
343#if !SHAPE_IN_BASIC_FLAGS
344shape_id_t rb_generic_shape_id(VALUE obj);
345#endif
346
// Return the shape id of +obj+. Special constants share one reserved
// shape; otherwise the id is read from the object's flags
// (SHAPE_IN_BASIC_FLAGS) or from a type-specific / generic side table.
RUBY_FUNC_EXPORTED shape_id_t
rb_shape_get_shape_id(VALUE obj)
{
    if (RB_SPECIAL_CONST_P(obj)) {
        return SPECIAL_CONST_SHAPE_ID;
    }

#if SHAPE_IN_BASIC_FLAGS
    return RBASIC_SHAPE_ID(obj);
#else
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        return ROBJECT_SHAPE_ID(obj);
        break;
      case T_CLASS:
      case T_MODULE:
        return RCLASS_SHAPE_ID(obj);
      default:
        return rb_generic_shape_id(obj);
    }
#endif
}
369
370size_t
371rb_shape_depth(rb_shape_t * shape)
372{
373 size_t depth = 1;
374
375 while (shape->parent_id != INVALID_SHAPE_ID) {
376 depth++;
377 shape = rb_shape_get_parent(shape);
378 }
379
380 return depth;
381}
382
384rb_shape_get_shape(VALUE obj)
385{
386 return rb_shape_get_shape_by_id(rb_shape_get_shape_id(obj));
387}
388
// Reserve the next slot in the statically sized shape list and return it.
// Crashes the VM when the shape space is exhausted; callers are expected
// to check next_shape_id against MAX_SHAPE_ID before creating variations.
static rb_shape_t *
shape_alloc(void)
{
    shape_id_t shape_id = GET_SHAPE_TREE()->next_shape_id;
    GET_SHAPE_TREE()->next_shape_id++;

    if (shape_id == (MAX_SHAPE_ID + 1)) {
        // TODO: Make an OutOfShapesError ??
        rb_bug("Out of shapes");
    }

    return &GET_SHAPE_TREE()->shape_list[shape_id];
}
402
403static rb_shape_t *
404rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id)
405{
406 rb_shape_t * shape = shape_alloc();
407
408 shape->edge_name = edge_name;
409 shape->next_iv_index = 0;
410 shape->parent_id = parent_id;
411 shape->edges = NULL;
412
413 return shape;
414}
415
416static rb_shape_t *
417rb_shape_alloc(ID edge_name, rb_shape_t * parent, enum shape_type type)
418{
419 rb_shape_t * shape = rb_shape_alloc_with_parent_id(edge_name, rb_shape_id(parent));
420 shape->type = (uint8_t)type;
421 shape->heap_index = parent->heap_index;
422 shape->capacity = parent->capacity;
423 shape->edges = 0;
424 return shape;
425}
426
427#ifdef HAVE_MMAP
// Recursively build the red-black index of SHAPE_IVAR ancestors for
// +shape+, memoizing the result in shape->ancestor_index. Non-ivar
// transitions (e.g. frozen) share their parent's index unchanged.
static redblack_node_t *
redblack_cache_ancestors(rb_shape_t * shape)
{
    // Stop at the root or at an already-cached shape.
    if (!(shape->ancestor_index || shape->parent_id == INVALID_SHAPE_ID)) {
        redblack_node_t * parent_index;

        parent_index = redblack_cache_ancestors(rb_shape_get_parent(shape));

        if (shape->type == SHAPE_IVAR) {
            shape->ancestor_index = redblack_insert(parent_index, shape->edge_name, shape);

#if RUBY_DEBUG
            if (shape->ancestor_index) {
                redblack_node_t *inserted_node = redblack_find(shape->ancestor_index, shape->edge_name);
                RUBY_ASSERT(inserted_node);
                RUBY_ASSERT(redblack_value(inserted_node) == shape);
            }
#endif
        }
        else {
            shape->ancestor_index = parent_index;
        }
    }

    return shape->ancestor_index;
}
454#else
// Without mmap there is no ancestor cache; always report an empty tree.
static redblack_node_t *
redblack_cache_ancestors(rb_shape_t * shape)
{
    return LEAF;
}
460#endif
461
// Allocate and initialize the child shape reached from +shape+ via the
// +shape_type+ transition named +id+. Grows the ivar capacity when the
// parent is full, and seeds the red-black ancestor cache once a shape
// accumulates more than ANCESTOR_CACHE_THRESHOLD ivars.
static rb_shape_t *
rb_shape_alloc_new_child(ID id, rb_shape_t * shape, enum shape_type shape_type)
{
    rb_shape_t * new_shape = rb_shape_alloc(id, shape, shape_type);

    switch (shape_type) {
      case SHAPE_IVAR:
        if (UNLIKELY(shape->next_iv_index >= shape->capacity)) {
            RUBY_ASSERT(shape->next_iv_index == shape->capacity);
            new_shape->capacity = (uint32_t)rb_malloc_grow_capa(shape->capacity, sizeof(VALUE));
        }
        RUBY_ASSERT(new_shape->capacity > shape->next_iv_index);
        new_shape->next_iv_index = shape->next_iv_index + 1;
        if (new_shape->next_iv_index > ANCESTOR_CACHE_THRESHOLD) {
            redblack_cache_ancestors(new_shape);
        }
        break;
      case SHAPE_FROZEN:
        // Freezing adds no ivar slot.
        new_shape->next_iv_index = shape->next_iv_index;
        break;
      case SHAPE_OBJ_TOO_COMPLEX:
      case SHAPE_ROOT:
      case SHAPE_T_OBJECT:
        rb_bug("Unreachable");
        break;
    }

    return new_shape;
}
491
// Find or create the child of +shape+ reached by the transition +id+ of
// kind +shape_type+.
//
// Sets *variation_created when a brand-new edge had to be added to a
// shape that already branches (callers use this to track per-class
// variation counts). When +new_variations_allowed+ is false, or the shape
// buffer is exhausted, returns the "too complex" shape instead of
// allocating. Runs under the VM lock because it mutates the shared tree.
static rb_shape_t*
get_next_shape_internal(rb_shape_t * shape, ID id, enum shape_type shape_type, bool * variation_created, bool new_variations_allowed)
{
    rb_shape_t *res = NULL;

    // There should never be outgoing edges from "too complex"
    RUBY_ASSERT(rb_shape_id(shape) != OBJ_TOO_COMPLEX_SHAPE_ID);

    *variation_created = false;

    RB_VM_LOCK_ENTER();
    {
        // If the current shape has children
        if (shape->edges) {
            // Check if it only has one child
            if (SINGLE_CHILD_P(shape->edges)) {
                rb_shape_t * child = SINGLE_CHILD(shape->edges);
                // If the one child has a matching edge name, then great,
                // we found what we want.
                if (child->edge_name == id) {
                    res = child;
                }
            }
            else {
                // If it has more than one child, do a hash lookup to find it.
                VALUE lookup_result;
                if (rb_id_table_lookup(shape->edges, id, &lookup_result)) {
                    res = (rb_shape_t *)lookup_result;
                }
            }
        }

        // If we didn't find the shape we're looking for we create it.
        if (!res) {
            // If we're not allowed to create a new variation, or if we're out of shapes
            // we return TOO_COMPLEX_SHAPE.
            if (!new_variations_allowed || GET_SHAPE_TREE()->next_shape_id > MAX_SHAPE_ID) {
                res = rb_shape_get_shape_by_id(OBJ_TOO_COMPLEX_SHAPE_ID);
            }
            else {
                rb_shape_t * new_shape = rb_shape_alloc_new_child(id, shape, shape_type);

                if (!shape->edges) {
                    // If the shape had no edge yet, we can directly set the new child
                    shape->edges = TAG_SINGLE_CHILD(new_shape);
                }
                else {
                    // If the edge was single child we need to allocate a table.
                    if (SINGLE_CHILD_P(shape->edges)) {
                        rb_shape_t * old_child = SINGLE_CHILD(shape->edges);
                        shape->edges = rb_id_table_create(2);
                        rb_id_table_insert(shape->edges, old_child->edge_name, (VALUE)old_child);
                    }

                    rb_id_table_insert(shape->edges, new_shape->edge_name, (VALUE)new_shape);
                    *variation_created = true;
                }

                res = new_shape;
            }
        }
    }
    RB_VM_LOCK_LEAVE();

    return res;
}
558
559int
560rb_shape_frozen_shape_p(rb_shape_t* shape)
561{
562 return SHAPE_FROZEN == (enum shape_type)shape->type;
563}
564
// Walk up from +shape+ looking for the SHAPE_IVAR transition named +id+.
// On success stores the removed shape in *removed_shape and returns a
// shape equivalent to +shape+ with that ivar removed, rebuilding the
// intervening transitions on top of the removed shape's parent. Returns
// NULL when the ivar isn't found; may return the "too complex" shape if
// rebuilding runs out of shapes.
static rb_shape_t *
remove_shape_recursive(rb_shape_t *shape, ID id, rb_shape_t **removed_shape)
{
    if (shape->parent_id == INVALID_SHAPE_ID) {
        // We've hit the top of the shape tree and couldn't find the
        // IV we wanted to remove, so return NULL
        return NULL;
    }
    else {
        if (shape->type == SHAPE_IVAR && shape->edge_name == id) {
            *removed_shape = shape;

            return rb_shape_get_parent(shape);
        }
        else {
            // This isn't the IV we want to remove, keep walking up.
            rb_shape_t *new_parent = remove_shape_recursive(rb_shape_get_parent(shape), id, removed_shape);

            // We found a new parent. Create a child of the new parent that
            // has the same attributes as this shape.
            if (new_parent) {
                if (UNLIKELY(new_parent->type == SHAPE_OBJ_TOO_COMPLEX)) {
                    return new_parent;
                }

                bool dont_care;
                rb_shape_t *new_child = get_next_shape_internal(new_parent, shape->edge_name, shape->type, &dont_care, true);
                if (UNLIKELY(new_child->type == SHAPE_OBJ_TOO_COMPLEX)) {
                    return new_child;
                }

                RUBY_ASSERT(new_child->capacity <= shape->capacity);

                return new_child;
            }
            else {
                // We went all the way to the top of the shape tree and couldn't
                // find an IV to remove, so return NULL
                return NULL;
            }
        }
    }
}
608
// Remove instance variable +id+ from +obj+, whose current shape is
// +shape+. Stores the removed ivar's value in *removed and compacts the
// remaining ivar values down by one slot. Returns false when the object
// is (or would become) "too complex"; the caller must then fall back to
// hash-based ivar storage.
bool
rb_shape_transition_shape_remove_ivar(VALUE obj, ID id, rb_shape_t *shape, VALUE *removed)
{
    if (UNLIKELY(shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
        return false;
    }

    rb_shape_t *removed_shape = NULL;
    rb_shape_t *new_shape = remove_shape_recursive(shape, id, &removed_shape);
    if (new_shape) {
        RUBY_ASSERT(removed_shape != NULL);

        if (UNLIKELY(new_shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
            return false;
        }

        RUBY_ASSERT(new_shape->next_iv_index == shape->next_iv_index - 1);

        // Locate the ivar array for this object type.
        VALUE *ivptr;
        switch(BUILTIN_TYPE(obj)) {
          case T_CLASS:
          case T_MODULE:
            ivptr = RCLASS_IVPTR(obj);
            break;
          case T_OBJECT:
            ivptr = ROBJECT_IVPTR(obj);
            break;
          default: {
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
            ivptr = ivtbl->as.shape.ivptr;
            break;
          }
        }

        *removed = ivptr[removed_shape->next_iv_index - 1];

        // Shift the values stored after the removed ivar down one slot.
        memmove(&ivptr[removed_shape->next_iv_index - 1], &ivptr[removed_shape->next_iv_index],
                ((new_shape->next_iv_index + 1) - removed_shape->next_iv_index) * sizeof(VALUE));

        // Re-embed objects when instances become small enough
        // This is necessary because YJIT assumes that objects with the same shape
        // have the same embeddedness for efficiency (avoid extra checks)
        if (BUILTIN_TYPE(obj) == T_OBJECT &&
                !RB_FL_TEST_RAW(obj, ROBJECT_EMBED) &&
                rb_obj_embedded_size(new_shape->next_iv_index) <= rb_gc_obj_slot_size(obj)) {
            RB_FL_SET_RAW(obj, ROBJECT_EMBED);
            memcpy(ROBJECT_IVPTR(obj), ivptr, new_shape->next_iv_index * sizeof(VALUE));
            xfree(ivptr);
        }

        rb_shape_set_shape(obj, new_shape);
    }
    return true;
}
664
666rb_shape_transition_shape_frozen(VALUE obj)
667{
668 rb_shape_t* shape = rb_shape_get_shape(obj);
669 RUBY_ASSERT(shape);
671
672 if (rb_shape_frozen_shape_p(shape) || rb_shape_obj_too_complex(obj)) {
673 return shape;
674 }
675
676 rb_shape_t* next_shape;
677
678 if (shape == rb_shape_get_root_shape()) {
679 return rb_shape_get_shape_by_id(SPECIAL_CONST_SHAPE_ID);
680 }
681
682 bool dont_care;
683 next_shape = get_next_shape_internal(shape, (ID)id_frozen, SHAPE_FROZEN, &dont_care, true);
684
685 RUBY_ASSERT(next_shape);
686 return next_shape;
687}
688
689/*
690 * This function is used for assertions where we don't want to increment
691 * max_iv_count
692 */
694rb_shape_get_next_iv_shape(rb_shape_t* shape, ID id)
695{
696 RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id))));
697 bool dont_care;
698 return get_next_shape_internal(shape, id, SHAPE_IVAR, &dont_care, true);
699}
700
701static inline rb_shape_t *
702shape_get_next(rb_shape_t *shape, VALUE obj, ID id, bool emit_warnings)
703{
704 RUBY_ASSERT(!is_instance_id(id) || RTEST(rb_sym2str(ID2SYM(id))));
705 if (UNLIKELY(shape->type == SHAPE_OBJ_TOO_COMPLEX)) {
706 return shape;
707 }
708
709#if RUBY_DEBUG
710 attr_index_t index;
711 if (rb_shape_get_iv_index(shape, id, &index)) {
712 rb_bug("rb_shape_get_next: trying to create ivar that already exists at index %u", index);
713 }
714#endif
715
716 bool allow_new_shape = true;
717
718 if (BUILTIN_TYPE(obj) == T_OBJECT) {
719 VALUE klass = rb_obj_class(obj);
720 allow_new_shape = RCLASS_EXT(klass)->variation_count < SHAPE_MAX_VARIATIONS;
721 }
722
723 bool variation_created = false;
724 rb_shape_t *new_shape = get_next_shape_internal(shape, id, SHAPE_IVAR, &variation_created, allow_new_shape);
725
726 // Check if we should update max_iv_count on the object's class
727 if (BUILTIN_TYPE(obj) == T_OBJECT) {
728 VALUE klass = rb_obj_class(obj);
729 if (new_shape->next_iv_index > RCLASS_EXT(klass)->max_iv_count) {
730 RCLASS_EXT(klass)->max_iv_count = new_shape->next_iv_index;
731 }
732
733 if (variation_created) {
734 RCLASS_EXT(klass)->variation_count++;
735 if (emit_warnings && rb_warning_category_enabled_p(RB_WARN_CATEGORY_PERFORMANCE)) {
736 if (RCLASS_EXT(klass)->variation_count >= SHAPE_MAX_VARIATIONS) {
739 "The class %"PRIsVALUE" reached %d shape variations, instance variables accesses will be slower and memory usage increased.\n"
740 "It is recommended to define instance variables in a consistent order, for instance by eagerly defining them all in the #initialize method.",
741 rb_class_path(klass),
742 SHAPE_MAX_VARIATIONS
743 );
744 }
745 }
746 }
747 }
748
749 return new_shape;
750}
751
753rb_shape_get_next(rb_shape_t *shape, VALUE obj, ID id)
754{
755 return shape_get_next(shape, obj, id, true);
756}
757
759rb_shape_get_next_no_warnings(rb_shape_t *shape, VALUE obj, ID id)
760{
761 return shape_get_next(shape, obj, id, false);
762}
763
// Same as rb_shape_get_iv_index, but uses a provided valid shape id and index
// to return a result faster if branches of the shape tree are closely related.
bool
rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint)
{
    attr_index_t index_hint = *value;
    rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
    rb_shape_t *initial_shape = shape;

    // No usable hint: fall back to a plain lookup and publish this id as
    // the hint for next time.
    if (*shape_id_hint == INVALID_SHAPE_ID) {
        *shape_id_hint = shape_id;
        return rb_shape_get_iv_index(shape, id, value);
    }

    rb_shape_t * shape_hint = rb_shape_get_shape_by_id(*shape_id_hint);

    // We assume it's likely shape_id_hint and shape_id have a close common
    // ancestor, so we check up to ANCESTOR_SEARCH_MAX_DEPTH ancestors before
    // eventually using the index, as in case of a match it will be faster.
    // However if the shape doesn't have an index, we walk the entire tree.
    int depth = INT_MAX;
    if (shape->ancestor_index && shape->next_iv_index >= ANCESTOR_CACHE_THRESHOLD) {
        depth = ANCESTOR_SEARCH_MAX_DEPTH;
    }

    while (depth > 0 && shape->next_iv_index > index_hint) {
        // Bring the hinted shape down to the same depth before comparing.
        while (shape_hint->next_iv_index > shape->next_iv_index) {
            shape_hint = rb_shape_get_parent(shape_hint);
        }

        if (shape_hint == shape) {
            // We've found a common ancestor so use the index hint
            *value = index_hint;
            *shape_id_hint = rb_shape_id(shape);
            return true;
        }
        if (shape->edge_name == id) {
            // We found the matching id before a common ancestor
            *value = shape->next_iv_index - 1;
            *shape_id_hint = rb_shape_id(shape);
            return true;
        }

        shape = rb_shape_get_parent(shape);
        depth--;
    }

    // If the original shape had an index but its ancestor doesn't
    // we switch back to the original one as it will be faster.
    if (!shape->ancestor_index && initial_shape->ancestor_index) {
        shape = initial_shape;
    }
    *shape_id_hint = shape_id;
    return rb_shape_get_iv_index(shape, id, value);
}
819
// Linear search up the shape tree for the SHAPE_IVAR transition named
// +id+. On success stores the ivar's array index in *value and returns
// true.
static bool
shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    while (shape->parent_id != INVALID_SHAPE_ID) {
        if (shape->edge_name == id) {
            enum shape_type shape_type;
            shape_type = (enum shape_type)shape->type;

            switch (shape_type) {
              case SHAPE_IVAR:
                RUBY_ASSERT(shape->next_iv_index > 0);
                *value = shape->next_iv_index - 1;
                return true;
              case SHAPE_ROOT:
              case SHAPE_T_OBJECT:
                return false;
              case SHAPE_OBJ_TOO_COMPLEX:
              case SHAPE_FROZEN:
                rb_bug("Ivar should not exist on transition");
            }
        }

        shape = rb_shape_get_parent(shape);
    }

    return false;
}
847
// Look up +id+ in the shape's red-black ancestor cache, if one is
// populated. Returns true and stores the ivar index on a hit; a miss with
// a populated cache is authoritative (asserted against the tree below).
static bool
shape_cache_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
{
    if (shape->ancestor_index && shape->next_iv_index >= ANCESTOR_CACHE_THRESHOLD) {
        redblack_node_t *node = redblack_find(shape->ancestor_index, id);
        if (node) {
            rb_shape_t *shape = redblack_value(node);
            *value = shape->next_iv_index - 1;

#if RUBY_DEBUG
            attr_index_t shape_tree_index;
            RUBY_ASSERT(shape_get_iv_index(shape, id, &shape_tree_index));
            RUBY_ASSERT(shape_tree_index == *value);
#endif

            return true;
        }

        /* Verify the cache is correct by checking that this instance variable
         * does not exist in the shape tree either. */
        RUBY_ASSERT(!shape_get_iv_index(shape, id, value));
    }

    return false;
}
873
874bool
875rb_shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)
876{
877 // It doesn't make sense to ask for the index of an IV that's stored
878 // on an object that is "too complex" as it uses a hash for storing IVs
879 RUBY_ASSERT(rb_shape_id(shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
880
881 if (!shape_cache_get_iv_index(shape, id, value)) {
882 // If it wasn't in the ancestor cache, then don't do a linear search
883 if (shape->ancestor_index && shape->next_iv_index >= ANCESTOR_CACHE_THRESHOLD) {
884 return false;
885 }
886 else {
887 return shape_get_iv_index(shape, id, value);
888 }
889 }
890
891 return true;
892}
893
// Write +shape+'s id back into +obj+'s shape field.
void
rb_shape_set_shape(VALUE obj, rb_shape_t* shape)
{
    rb_shape_set_shape_id(obj, rb_shape_id(shape));
}
899
// Byte offset used by the JITs to locate the shape id within the flags
// word. NOTE(review): the expression divides a bit count by a byte size
// (SHAPE_ID_NUM_BITS / sizeof(uintptr_t)) — confirm the units are intended.
int32_t
rb_shape_id_offset(void)
{
    return sizeof(uintptr_t) - SHAPE_ID_NUM_BITS / sizeof(uintptr_t);
}
905
907rb_shape_traverse_from_new_root(rb_shape_t *initial_shape, rb_shape_t *dest_shape)
908{
909 RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
910 rb_shape_t *next_shape = initial_shape;
911
912 if (dest_shape->type != initial_shape->type) {
913 next_shape = rb_shape_traverse_from_new_root(initial_shape, rb_shape_get_parent(dest_shape));
914 if (!next_shape) {
915 return NULL;
916 }
917 }
918
919 switch ((enum shape_type)dest_shape->type) {
920 case SHAPE_IVAR:
921 case SHAPE_FROZEN:
922 if (!next_shape->edges) {
923 return NULL;
924 }
925
926 VALUE lookup_result;
927 if (SINGLE_CHILD_P(next_shape->edges)) {
928 rb_shape_t * child = SINGLE_CHILD(next_shape->edges);
929 if (child->edge_name == dest_shape->edge_name) {
930 return child;
931 }
932 else {
933 return NULL;
934 }
935 }
936 else {
937 if (rb_id_table_lookup(next_shape->edges, dest_shape->edge_name, &lookup_result)) {
938 next_shape = (rb_shape_t *)lookup_result;
939 }
940 else {
941 return NULL;
942 }
943 }
944 break;
945 case SHAPE_ROOT:
946 case SHAPE_T_OBJECT:
947 break;
948 case SHAPE_OBJ_TOO_COMPLEX:
949 rb_bug("Unreachable");
950 break;
951 }
952
953 return next_shape;
954}
955
957rb_shape_rebuild_shape(rb_shape_t * initial_shape, rb_shape_t * dest_shape)
958{
959 RUBY_ASSERT(rb_shape_id(initial_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
960 RUBY_ASSERT(rb_shape_id(dest_shape) != OBJ_TOO_COMPLEX_SHAPE_ID);
961
962 rb_shape_t * midway_shape;
963
964 RUBY_ASSERT(initial_shape->type == SHAPE_T_OBJECT);
965
966 if (dest_shape->type != initial_shape->type) {
967 midway_shape = rb_shape_rebuild_shape(initial_shape, rb_shape_get_parent(dest_shape));
968 if (UNLIKELY(rb_shape_id(midway_shape) == OBJ_TOO_COMPLEX_SHAPE_ID)) {
969 return midway_shape;
970 }
971 }
972 else {
973 midway_shape = initial_shape;
974 }
975
976 switch ((enum shape_type)dest_shape->type) {
977 case SHAPE_IVAR:
978 midway_shape = rb_shape_get_next_iv_shape(midway_shape, dest_shape->edge_name);
979 break;
980 case SHAPE_ROOT:
981 case SHAPE_FROZEN:
982 case SHAPE_T_OBJECT:
983 break;
984 case SHAPE_OBJ_TOO_COMPLEX:
985 rb_bug("Unreachable");
986 break;
987 }
988
989 return midway_shape;
990}
991
992RUBY_FUNC_EXPORTED bool
993rb_shape_obj_too_complex(VALUE obj)
994{
995 return rb_shape_get_shape_id(obj) == OBJ_TOO_COMPLEX_SHAPE_ID;
996}
997
998size_t
999rb_shape_edges_count(rb_shape_t *shape)
1000{
1001 if (shape->edges) {
1002 if (SINGLE_CHILD_P(shape->edges)) {
1003 return 1;
1004 }
1005 else {
1006 return rb_id_table_size(shape->edges);
1007 }
1008 }
1009 return 0;
1010}
1011
1012size_t
1013rb_shape_memsize(rb_shape_t *shape)
1014{
1015 size_t memsize = sizeof(rb_shape_t);
1016 if (shape->edges && !SINGLE_CHILD_P(shape->edges)) {
1017 memsize += rb_id_table_memsize(shape->edges);
1018 }
1019 return memsize;
1020}
1021
1022#if SHAPE_DEBUG
1023/*
1024 * Exposing Shape to Ruby via RubyVM.debug_shape
1025 */
1026
1027static VALUE
1028rb_shape_too_complex(VALUE self)
1029{
1030 rb_shape_t * shape;
1031 shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
1032 if (rb_shape_id(shape) == OBJ_TOO_COMPLEX_SHAPE_ID) {
1033 return Qtrue;
1034 }
1035 else {
1036 return Qfalse;
1037 }
1038}
1039
1040static VALUE
1041parse_key(ID key)
1042{
1043 if (is_instance_id(key)) {
1044 return ID2SYM(key);
1045 }
1046 return LONG2NUM(key);
1047}
1048
1049static VALUE rb_shape_edge_name(rb_shape_t * shape);
1050
// Wrap +shape+ in a frozen RubyVM::Shape struct for the debug API.
static VALUE
rb_shape_t_to_rb_cShape(rb_shape_t *shape)
{
    VALUE rb_cShape = rb_const_get(rb_cRubyVM, rb_intern("Shape"));

    VALUE obj = rb_struct_new(rb_cShape,
            INT2NUM(rb_shape_id(shape)),
            INT2NUM(shape->parent_id),
            rb_shape_edge_name(shape),
            INT2NUM(shape->next_iv_index),
            INT2NUM(shape->heap_index),
            INT2NUM(shape->type),
            INT2NUM(shape->capacity));
    rb_obj_freeze(obj);
    return obj;
}
1067
// rb_id_table_foreach callback: store {edge name => RubyVM::Shape} in the
// hash pointed to by +ref+.
static enum rb_id_table_iterator_result
rb_edges_to_hash(ID key, VALUE value, void *ref)
{
    rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_shape_t_to_rb_cShape((rb_shape_t*)value));
    return ID_TABLE_CONTINUE;
}
1074
// RubyVM::Shape#edges — map each outgoing edge name to its child shape.
static VALUE
rb_shape_edges(VALUE self)
{
    rb_shape_t* shape;

    shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));

    VALUE hash = rb_hash_new();

    if (shape->edges) {
        // A tagged single child is not a real table; emit it directly.
        if (SINGLE_CHILD_P(shape->edges)) {
            rb_shape_t * child = SINGLE_CHILD(shape->edges);
            rb_edges_to_hash(child->edge_name, (VALUE)child, &hash);
        }
        else {
            rb_id_table_foreach(shape->edges, rb_edges_to_hash, &hash);
        }
    }

    return hash;
}
1096
// RubyVM::Shape#edge_name — a Symbol for ivar transitions; non-ivar edge
// names fall back to a number. NOTE(review): the fallback returns the
// shape's capacity rather than the edge name itself — confirm intended.
static VALUE
rb_shape_edge_name(rb_shape_t * shape)
{
    if (shape->edge_name) {
        if (is_instance_id(shape->edge_name)) {
            return ID2SYM(shape->edge_name);
        }
        return INT2NUM(shape->capacity);
    }
    return Qnil;
}
1108
1109static VALUE
1110rb_shape_export_depth(VALUE self)
1111{
1112 rb_shape_t* shape;
1113 shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
1114 return SIZET2NUM(rb_shape_depth(shape));
1115}
1116
1117static VALUE
1118rb_shape_parent(VALUE self)
1119{
1120 rb_shape_t * shape;
1121 shape = rb_shape_get_shape_by_id(NUM2INT(rb_struct_getmember(self, rb_intern("id"))));
1122 if (shape->parent_id != INVALID_SHAPE_ID) {
1123 return rb_shape_t_to_rb_cShape(rb_shape_get_parent(shape));
1124 }
1125 else {
1126 return Qnil;
1127 }
1128}
1129
// RubyVM.debug_shape(obj) — expose +obj+'s shape as a RubyVM::Shape.
static VALUE
rb_shape_debug_shape(VALUE self, VALUE obj)
{
    return rb_shape_t_to_rb_cShape(rb_shape_get_shape(obj));
}
1135
// Debug accessor for the root shape as a RubyVM::Shape.
static VALUE
rb_shape_root_shape(VALUE self)
{
    return rb_shape_t_to_rb_cShape(rb_shape_get_root_shape());
}
1141
// Debug helper: number of shape ids still unallocated.
static VALUE
rb_shape_shapes_available(VALUE self)
{
    return INT2NUM(MAX_SHAPE_ID - (GET_SHAPE_TREE()->next_shape_id - 1));
}
1147
// Debug helper: artificially exhaust the shape space, leaving +offset+
// (default 0) ids still available. Used by tests to exercise the
// "too complex" fallback paths.
static VALUE
rb_shape_exhaust(int argc, VALUE *argv, VALUE self)
{
    rb_check_arity(argc, 0, 1);
    int offset = argc == 1 ? NUM2INT(argv[0]) : 0;
    GET_SHAPE_TREE()->next_shape_id = MAX_SHAPE_ID - offset + 1;
    return Qnil;
}
1156
1157VALUE rb_obj_shape(rb_shape_t* shape);
1158
1159static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)
1160{
1161 rb_hash_aset(*(VALUE *)ref, parse_key(key), rb_obj_shape((rb_shape_t*)value));
1162 return ID_TABLE_CONTINUE;
1163}
1164
1165static VALUE edges(struct rb_id_table* edges)
1166{
1167 VALUE hash = rb_hash_new();
1168 if (SINGLE_CHILD_P(edges)) {
1169 rb_shape_t * child = SINGLE_CHILD(edges);
1170 collect_keys_and_values(child->edge_name, (VALUE)child, &hash);
1171 }
1172 else {
1173 rb_id_table_foreach(edges, collect_keys_and_values, &hash);
1174 }
1175 return hash;
1176}
1177
1178VALUE
1179rb_obj_shape(rb_shape_t* shape)
1180{
1181 VALUE rb_shape = rb_hash_new();
1182
1183 rb_hash_aset(rb_shape, ID2SYM(rb_intern("id")), INT2NUM(rb_shape_id(shape)));
1184 rb_hash_aset(rb_shape, ID2SYM(rb_intern("edges")), edges(shape->edges));
1185
1186 if (shape == rb_shape_get_root_shape()) {
1187 rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(ROOT_SHAPE_ID));
1188 }
1189 else {
1190 rb_hash_aset(rb_shape, ID2SYM(rb_intern("parent_id")), INT2NUM(shape->parent_id));
1191 }
1192
1193 rb_hash_aset(rb_shape, ID2SYM(rb_intern("edge_name")), rb_id2str(shape->edge_name));
1194 return rb_shape;
1195}
1196
1197static VALUE
1198shape_transition_tree(VALUE self)
1199{
1200 return rb_obj_shape(rb_shape_get_root_shape());
1201}
1202
1203static VALUE
1204rb_shape_find_by_id(VALUE mod, VALUE id)
1205{
1206 shape_id_t shape_id = NUM2UINT(id);
1207 if (shape_id >= GET_SHAPE_TREE()->next_shape_id) {
1208 rb_raise(rb_eArgError, "Shape ID %d is out of bounds\n", shape_id);
1209 }
1210 return rb_shape_t_to_rb_cShape(rb_shape_get_shape_by_id(shape_id));
1211}
1212#endif
1213
1214#ifdef HAVE_MMAP
1215#include <sys/mman.h>
1216#endif
1217
/* One-time initialization of the global shape tree: allocates the shape
 * buffer and red-black cache, then creates the well-known shapes (root,
 * special-const/frozen, too-complex, and one T_OBJECT shape per GC heap
 * size). The RUBY_ASSERTs pin the fixed shape-id layout, so creation order
 * here must not change. */
void
Init_default_shapes(void)
{
    rb_shape_tree_ptr = xcalloc(1, sizeof(rb_shape_tree_t));

#ifdef HAVE_MMAP
    /* Reserve the whole shape buffer up front with mmap so shape pointers
     * stay stable; pages are committed lazily by the OS. */
    size_t shape_list_mmap_size = rb_size_mul_or_raise(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t), rb_eRuntimeError);
    rb_shape_tree_ptr->shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
                                                       PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (GET_SHAPE_TREE()->shape_list == MAP_FAILED) {
        /* Normalize the mmap failure sentinel to NULL for the check below. */
        GET_SHAPE_TREE()->shape_list = 0;
    }
    else {
        ruby_annotate_mmap(rb_shape_tree_ptr->shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");
    }
#else
    GET_SHAPE_TREE()->shape_list = xcalloc(SHAPE_BUFFER_SIZE, sizeof(rb_shape_t));
#endif

    /* Unlike the red-black cache below, the shape list is mandatory. */
    if (!GET_SHAPE_TREE()->shape_list) {
        rb_memerror();
    }

    /* Internal IDs used as edge names for the frozen and T_OBJECT
     * transitions; they can never collide with real ivar names. */
    id_frozen = rb_make_internal_id();
    id_t_object = rb_make_internal_id();

#ifdef HAVE_MMAP
    size_t shape_cache_mmap_size = rb_size_mul_or_raise(REDBLACK_CACHE_SIZE, sizeof(redblack_node_t), rb_eRuntimeError);
    rb_shape_tree_ptr->shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
                                                             PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    rb_shape_tree_ptr->cache_size = 0;

    // If mmap fails, then give up on the redblack tree cache.
    // We set the cache size such that the redblack node allocators think
    // the cache is full.
    if (GET_SHAPE_TREE()->shape_cache == MAP_FAILED) {
        GET_SHAPE_TREE()->shape_cache = 0;
        GET_SHAPE_TREE()->cache_size = REDBLACK_CACHE_SIZE;
    }
    else {
        ruby_annotate_mmap(rb_shape_tree_ptr->shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");
    }
#endif

    // Root shape
    rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
    root->capacity = 0;
    root->type = SHAPE_ROOT;
    root->heap_index = 0;
    GET_SHAPE_TREE()->root_shape = root;
    RUBY_ASSERT(rb_shape_id(GET_SHAPE_TREE()->root_shape) == ROOT_SHAPE_ID);

    bool dont_care;
    // Special const shape
#if RUBY_DEBUG
    rb_shape_t *special_const_shape =
#endif
    /* A frozen edge off the root; must be the second shape allocated so it
     * lands on SPECIAL_CONST_SHAPE_ID. */
    get_next_shape_internal(root, (ID)id_frozen, SHAPE_FROZEN, &dont_care, true);
    RUBY_ASSERT(rb_shape_id(special_const_shape) == SPECIAL_CONST_SHAPE_ID);
    RUBY_ASSERT(SPECIAL_CONST_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1));
    RUBY_ASSERT(rb_shape_frozen_shape_p(special_const_shape));

    /* Sentinel shape for objects whose ivars fell back to a hash table. */
    rb_shape_t *too_complex_shape = rb_shape_alloc_with_parent_id(0, ROOT_SHAPE_ID);
    too_complex_shape->type = SHAPE_OBJ_TOO_COMPLEX;
    too_complex_shape->heap_index = 0;
    RUBY_ASSERT(OBJ_TOO_COMPLEX_SHAPE_ID == (GET_SHAPE_TREE()->next_shape_id - 1));
    RUBY_ASSERT(rb_shape_id(too_complex_shape) == OBJ_TOO_COMPLEX_SHAPE_ID);

    // Make shapes for T_OBJECT
    size_t *sizes = rb_gc_heap_sizes();
    for (int i = 0; sizes[i] > 0; i++) {
        rb_shape_t *t_object_shape = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);
        t_object_shape->type = SHAPE_T_OBJECT;
        t_object_shape->heap_index = i;
        /* Capacity = how many VALUEs fit in the slot after the RObject header. */
        t_object_shape->capacity = (uint32_t)((sizes[i] - offsetof(struct RObject, as.ary)) / sizeof(VALUE));
        t_object_shape->edges = rb_id_table_create(0);
        t_object_shape->ancestor_index = LEAF;
        RUBY_ASSERT(rb_shape_id(t_object_shape) == (shape_id_t)(i + FIRST_T_OBJECT_SHAPE_ID));
    }
}
1298
/* Register the RubyVM::Shape debug API. Compiled to a no-op unless
 * SHAPE_DEBUG is enabled. */
void
Init_shape(void)
{
#if SHAPE_DEBUG
    /* Document-class: RubyVM::Shape
     * :nodoc: */
    VALUE rb_cShape = rb_struct_define_under(rb_cRubyVM, "Shape",
                                             "id",
                                             "parent_id",
                                             "edge_name",
                                             "next_iv_index",
                                             "heap_index",
                                             "type",
                                             "capacity",
                                             NULL);

    rb_define_method(rb_cShape, "parent", rb_shape_parent, 0);
    rb_define_method(rb_cShape, "edges", rb_shape_edges, 0);
    rb_define_method(rb_cShape, "depth", rb_shape_export_depth, 0);
    rb_define_method(rb_cShape, "too_complex?", rb_shape_too_complex, 0);

    /* Expose a C constant under RubyVM::Shape with the same name. */
#define DEFINE_SHAPE_CONST(name) rb_define_const(rb_cShape, #name, INT2NUM(name))
    DEFINE_SHAPE_CONST(SHAPE_ROOT);
    DEFINE_SHAPE_CONST(SHAPE_IVAR);
    DEFINE_SHAPE_CONST(SHAPE_T_OBJECT);
    DEFINE_SHAPE_CONST(SHAPE_FROZEN);
    DEFINE_SHAPE_CONST(SHAPE_ID_NUM_BITS);
    DEFINE_SHAPE_CONST(SHAPE_FLAG_SHIFT);
    DEFINE_SHAPE_CONST(SPECIAL_CONST_SHAPE_ID);
    DEFINE_SHAPE_CONST(OBJ_TOO_COMPLEX_SHAPE_ID);
    DEFINE_SHAPE_CONST(FIRST_T_OBJECT_SHAPE_ID);
    DEFINE_SHAPE_CONST(SHAPE_MAX_VARIATIONS);
#undef DEFINE_SHAPE_CONST

    /* These byte-size constants are sizeof expressions, not the literal
     * expansion of their namesake macro, so they are defined explicitly. */
    rb_define_const(rb_cShape, "SIZEOF_RB_SHAPE_T", INT2NUM(sizeof(rb_shape_t)));
    rb_define_const(rb_cShape, "SIZEOF_REDBLACK_NODE_T", INT2NUM(sizeof(redblack_node_t)));
    rb_define_const(rb_cShape, "SHAPE_BUFFER_SIZE", INT2NUM(sizeof(rb_shape_t) * SHAPE_BUFFER_SIZE));
    rb_define_const(rb_cShape, "REDBLACK_CACHE_SIZE", INT2NUM(sizeof(redblack_node_t) * REDBLACK_CACHE_SIZE));

    rb_define_singleton_method(rb_cShape, "transition_tree", shape_transition_tree, 0);
    rb_define_singleton_method(rb_cShape, "find_by_id", rb_shape_find_by_id, 1);
    rb_define_singleton_method(rb_cShape, "of", rb_shape_debug_shape, 1);
    rb_define_singleton_method(rb_cShape, "root_shape", rb_shape_root_shape, 0);
    rb_define_singleton_method(rb_cShape, "shapes_available", rb_shape_shapes_available, 0);
    rb_define_singleton_method(rb_cShape, "exhaust_shapes", rb_shape_exhaust, -1);
#endif
}
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:219
#define rb_define_method(klass, mid, func, arity)
Defines klass#mid.
#define rb_define_singleton_method(klass, mid, func, arity)
Defines klass.mid.
static VALUE RB_FL_TEST_RAW(VALUE obj, VALUE flags)
This is an implementation detail of RB_FL_TEST().
Definition fl_type.h:469
static void RB_FL_SET_RAW(VALUE obj, VALUE flags)
This is an implementation detail of RB_FL_SET().
Definition fl_type.h:606
static bool RB_OBJ_FROZEN(VALUE obj)
Checks if an object is frozen.
Definition fl_type.h:898
#define xfree
Old name of ruby_xfree.
Definition xmalloc.h:58
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define SIZET2NUM
Old name of RB_SIZE2NUM.
Definition size_t.h:62
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define NUM2UINT
Old name of RB_NUM2UINT.
Definition int.h:45
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define Qtrue
Old name of RUBY_Qtrue.
#define NUM2INT
Old name of RB_NUM2INT.
Definition int.h:44
#define INT2NUM
Old name of RB_INT2NUM.
Definition int.h:43
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition value_type.h:75
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define xcalloc
Old name of ruby_xcalloc.
Definition xmalloc.h:55
void rb_category_warn(rb_warning_category_t category, const char *fmt,...)
Identical to rb_category_warning(), except it reports unless $VERBOSE is nil.
Definition error.c:476
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1428
@ RB_WARN_CATEGORY_PERFORMANCE
Warning is for performance issues (not enabled by -w).
Definition error.h:54
size_t rb_obj_embedded_size(uint32_t numiv)
Internal header for Object.
Definition object.c:98
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:247
VALUE rb_obj_freeze(VALUE obj)
Just calls rb_obj_freeze_inline() inside.
Definition object.c:1260
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:284
VALUE rb_struct_define_under(VALUE space, const char *name,...)
Identical to rb_struct_define(), except it defines the class under the specified namespace instead of...
Definition struct.c:505
VALUE rb_struct_new(VALUE klass,...)
Creates an instance of the given struct.
Definition struct.c:842
VALUE rb_struct_getmember(VALUE self, ID key)
Identical to rb_struct_aref(), except it takes ID instead of VALUE.
Definition struct.c:232
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition variable.c:3163
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:293
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
Definition symbol.c:970
VALUE type(ANYARGS)
ANYARGS-ed function type.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
Definition robject.h:136
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
C99 shim for <stdbool.h>
Ruby's ordinal objects.
Definition robject.h:83
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40