#include "internal/class.h"
#include "internal/error.h"
#include "internal/gc.h"
#include "internal/object.h"
#include "internal/symbol.h"
#include "internal/variable.h"
#define SHAPE_DEBUG (VM_CHECK_MODE > 0)

#define REDBLACK_CACHE_SIZE (SHAPE_BUFFER_SIZE * 32)

#define SINGLE_CHILD_TAG 0x1
#define TAG_SINGLE_CHILD(x) (VALUE)((uintptr_t)(x) | SINGLE_CHILD_TAG)
#define SINGLE_CHILD_MASK (~((uintptr_t)SINGLE_CHILD_TAG))
#define SINGLE_CHILD_P(x) ((uintptr_t)(x) & SINGLE_CHILD_TAG)
#define SINGLE_CHILD(x) (rb_shape_t *)((uintptr_t)(x) & SINGLE_CHILD_MASK)
#define ANCESTOR_CACHE_THRESHOLD 10
#define MAX_SHAPE_ID (SHAPE_BUFFER_SIZE - 1)
#define ANCESTOR_SEARCH_MAX_DEPTH 2
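/*
 * Note on the SINGLE_CHILD_* macros above: when a shape has exactly one
 * outgoing edge, the child rb_shape_t pointer is stored directly in the
 * parent's `edges` slot with the low bit set as a tag, instead of paying
 * for an id table. This relies on rb_shape_t pointers being at least
 * 2-byte aligned, so bit 0 is always free. A minimal sketch of the round
 * trip (illustrative only; `child` is a hypothetical shape pointer):
 *
 *     VALUE edges = TAG_SINGLE_CHILD(child);    // pointer | 0x1
 *     if (SINGLE_CHILD_P(edges)) {              // is the tag bit set?
 *         rb_shape_t *c = SINGLE_CHILD(edges);  // mask the tag back off
 *     }
 */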
static ID id_object_id;

    if (node->l == LEAF) {

    if (node->r == LEAF) {

    RUBY_ASSERT(redblack_left(tree) == LEAF || redblack_left(tree)->key < tree->key);
    RUBY_ASSERT(redblack_right(tree) == LEAF || redblack_right(tree)->key > tree->key);

    if (tree->key == key) {

    if (key < tree->key) {
        return redblack_find(redblack_left(tree), key);

    return redblack_find(redblack_right(tree), key);

    return (rb_shape_t *)((uintptr_t)node->value & ~(uintptr_t)1);

    return node && ((uintptr_t)node->value & RED);

    return redblack_color(node) == RED;

    redblack_id_t id = (redblack_id_t)(node - redblack_nodes);

    if (rb_shape_tree.cache_size + 1 >= REDBLACK_CACHE_SIZE) {

    node->value = (rb_shape_t *)((uintptr_t)value | color);
    node->l = redblack_id_for(left);
    node->r = redblack_id_for(right);

    if (color == BLACK) {
        ID new_key, new_left_key, new_right_key;
        rb_shape_t *new_value, *new_left_value, *new_right_value;
        redblack_node_t *new_left_left, *new_left_right, *new_right_left, *new_right_right;

        if (redblack_red_p(left) && redblack_red_p(redblack_left(left))) {

            new_right_value = value;
            new_right_right = right;

            new_value = redblack_value(left);
            new_right_left = redblack_right(left);

            new_left_key = redblack_left(left)->key;
            new_left_value = redblack_value(redblack_left(left));

            new_left_left = redblack_left(redblack_left(left));
            new_left_right = redblack_right(redblack_left(left));
        }
        else if (redblack_red_p(left) && redblack_red_p(redblack_right(left))) {

            new_right_value = value;
            new_right_right = right;

            new_left_key = left->key;
            new_left_value = redblack_value(left);
            new_left_left = redblack_left(left);

            new_key = redblack_right(left)->key;
            new_value = redblack_value(redblack_right(left));
            new_left_right = redblack_left(redblack_right(left));
            new_right_left = redblack_right(redblack_right(left));
        }
        else if (redblack_red_p(right) && redblack_red_p(redblack_left(right))) {

            new_left_value = value;
            new_left_left = left;

            new_right_key = right->key;
            new_right_value = redblack_value(right);
            new_right_right = redblack_right(right);

            new_key = redblack_left(right)->key;
            new_value = redblack_value(redblack_left(right));
            new_left_right = redblack_left(redblack_left(right));
            new_right_left = redblack_right(redblack_left(right));
        }
        else if (redblack_red_p(right) && redblack_red_p(redblack_right(right))) {

            new_left_value = value;
            new_left_left = left;

            new_key = right->key;
            new_value = redblack_value(right);
            new_left_right = redblack_left(right);

            new_right_key = redblack_right(right)->key;
            new_right_value = redblack_value(redblack_right(right));
            new_right_left = redblack_left(redblack_right(right));
            new_right_right = redblack_right(redblack_right(right));

            return redblack_new(color, key, value, left, right);

        RUBY_ASSERT(new_left_left == LEAF || new_left_left->key < new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key > new_left_key);
        RUBY_ASSERT(new_left_right == LEAF || new_left_right->key < new_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key < new_right_key);
        RUBY_ASSERT(new_right_left == LEAF || new_right_left->key > new_key);
        RUBY_ASSERT(new_right_right == LEAF || new_right_right->key > new_right_key);

        return redblack_new(
            RED, new_key, new_value,
            redblack_new(BLACK, new_left_key, new_left_value, new_left_left, new_left_right),
            redblack_new(BLACK, new_right_key, new_right_value, new_right_left, new_right_right));

    return redblack_new(color, key, value, left, right);
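/*
 * The four red/red cases handled above are the classic Okasaki-style
 * rebalance for a persistent red-black tree: whenever a black node ends up
 * with a red child that itself has a red child (left-left, left-right,
 * right-left or right-right), the three nodes are rewritten as one RED
 * parent with two BLACK children, as built by the final redblack_new(RED,
 * ...) call. Conceptually, with keys a < b < c and subtrees t1..t4 (two of
 * the four input cases shown, sketch only):
 *
 *        B:c                B:a                    R:b
 *       /   \              /   \                  /   \
 *     R:b    t4          t1    R:b     ==>      B:a   B:c
 *    /   \                    /   \            /  \   /  \
 *  R:a    t3                t2    R:c         t1  t2 t3  t4
 *
 * Because inserts build a new path of nodes through redblack_new rather
 * than mutating the existing tree, rebalancing constructs fresh nodes
 * instead of rotating pointers in place.
 */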
    return redblack_new(RED, key, value, LEAF, LEAF);

    if (key < tree->key) {
        left = redblack_insert_aux(redblack_left(tree), key, value);

        right = redblack_right(tree);
        RUBY_ASSERT(right == LEAF || right->key > tree->key);
    }
    else if (key > tree->key) {
        left = redblack_left(tree);
        RUBY_ASSERT(left == LEAF || left->key < tree->key);
        right = redblack_insert_aux(redblack_right(tree), key, value);

    return redblack_balance(
        redblack_color(tree),
        redblack_value(tree),

    node->value = redblack_value(node);

    if (redblack_red_p(root)) {
        return redblack_force_black(root);

rb_shape_get_root_shape(void)

    return rb_shape_tree.root_shape;

shape_tree_mark(void *data)

    rb_shape_t *cursor = rb_shape_get_root_shape();
    rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id - 1);
    while (cursor <= end) {
        if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
            rb_gc_mark_movable(cursor->edges);

shape_tree_compact(void *data)

    rb_shape_t *cursor = rb_shape_get_root_shape();
    rb_shape_t *end = RSHAPE(rb_shape_tree.next_shape_id - 1);
    while (cursor <= end) {
        if (cursor->edges && !SINGLE_CHILD_P(cursor->edges)) {
            cursor->edges = rb_gc_location(cursor->edges);

shape_tree_memsize(const void *data)

    .dmark = shape_tree_mark,
    .dsize = shape_tree_memsize,
    .dcompact = shape_tree_compact,
    .flags = RUBY_TYPED_FREE_IMMEDIATELY | RUBY_TYPED_WB_PROTECTED,

static inline shape_id_t

    return (shape_id_t)(shape - rb_shape_tree.shape_list);

static inline shape_id_t
shape_id(rb_shape_t *shape, shape_id_t previous_shape_id)

    shape_id_t raw_id = (shape_id_t)(shape - rb_shape_tree.shape_list);
    return raw_id | (previous_shape_id & SHAPE_ID_FLAGS_MASK);

shape_frozen_p(shape_id_t shape_id)

    return shape_id & SHAPE_ID_FL_FROZEN;

rb_shape_each_shape_id(each_shape_callback callback, void *data)

    rb_shape_t *start = rb_shape_get_root_shape();

    while (cursor < end) {
        callback((shape_id_t)(cursor - start), data);

RUBY_FUNC_EXPORTED shape_id_t
rb_obj_shape_id(VALUE obj)

        return SPECIAL_CONST_SHAPE_ID;

        VALUE fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
            return RBASIC_SHAPE_ID(fields_obj);
        return ROOT_SHAPE_ID;

    return RBASIC_SHAPE_ID(obj);

rb_shape_depth(shape_id_t shape_id)

    while (shape->parent_id != INVALID_SHAPE_ID) {
        shape = RSHAPE(shape->parent_id);

    shape_id_t current, new_id;

        if (current > MAX_SHAPE_ID) {

        new_id = current + 1;
    } while (current != RUBY_ATOMIC_CAS(rb_shape_tree.next_shape_id, current, new_id));

    return &rb_shape_tree.shape_list[current];
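/*
 * shape_alloc claims slots from the preallocated shape_list with a
 * lock-free compare-and-swap loop: reload next_shape_id, stop once it
 * would exceed MAX_SHAPE_ID (presumably returning failure so callers can
 * fall back to too-complex shapes), and retry the CAS until this thread
 * wins the increment. The winning index is a stable slot in the mmap'd
 * shape array, so shapes never move once allocated.
 */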
rb_shape_alloc_with_parent_id(ID edge_name, shape_id_t parent_id)

    if (!shape) return NULL;

    shape->edge_name = edge_name;
    shape->next_field_index = 0;
    shape->parent_id = parent_id;

    rb_shape_t *shape = rb_shape_alloc_with_parent_id(edge_name, raw_shape_id(parent));
    if (!shape) return NULL;

    shape->type = (uint8_t)type;
    shape->capacity = parent->capacity;

    if (!(shape->ancestor_index || shape->parent_id == INVALID_SHAPE_ID)) {
        parent_index = redblack_cache_ancestors(RSHAPE(shape->parent_id));

        if (shape->type == SHAPE_IVAR) {
            shape->ancestor_index = redblack_insert(parent_index, shape->edge_name, shape);

            if (shape->ancestor_index) {
                redblack_node_t *inserted_node = redblack_find(shape->ancestor_index, shape->edge_name);
                RUBY_ASSERT(redblack_value(inserted_node) == shape);

            shape->ancestor_index = parent_index;

    return shape->ancestor_index;
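/*
 * redblack_cache_ancestors lazily builds shape->ancestor_index: a
 * persistent red-black tree mapping ivar edge names to the shape that
 * introduced them, shared structurally with the parent's index. SHAPE_IVAR
 * shapes insert their own edge_name; other transition types just reuse the
 * parent's tree. Lookups that would otherwise walk the parent chain one
 * link at a time can then use redblack_find, which only pays off for long
 * chains, hence the ANCESTOR_CACHE_THRESHOLD guard used by callers below.
 * shape_grow_capa and rb_shape_alloc_new_child (next) handle the other
 * half of an ivar transition: picking the child's capacity from the
 * per-heap table and bumping next_field_index.
 */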
shape_grow_capa(attr_index_t current_capa)

    const attr_index_t *capacities = rb_shape_tree.capacities;

    while ((capa = *capacities)) {
        if (capa > current_capa) {

    return (attr_index_t)rb_malloc_grow_capa(current_capa, sizeof(VALUE));

rb_shape_alloc_new_child(ID id, rb_shape_t *shape, enum shape_type shape_type)

    rb_shape_t *new_shape = rb_shape_alloc(id, shape, shape_type);
    if (!new_shape) return NULL;

    switch (shape_type) {

        if (UNLIKELY(shape->next_field_index >= shape->capacity)) {
            RUBY_ASSERT(shape->next_field_index == shape->capacity);
            new_shape->capacity = shape_grow_capa(shape->capacity);

        RUBY_ASSERT(new_shape->capacity > shape->next_field_index);
        new_shape->next_field_index = shape->next_field_index + 1;
        if (new_shape->next_field_index > ANCESTOR_CACHE_THRESHOLD) {
            redblack_cache_ancestors(new_shape);

        rb_bug("Unreachable");
get_next_shape_internal_atomic(rb_shape_t *shape, ID id, enum shape_type shape_type, bool *variation_created, bool new_variations_allowed)

    *variation_created = false;

        edges_table = RUBY_ATOMIC_VALUE_LOAD(shape->edges);

            if (SINGLE_CHILD_P(edges_table)) {
                rb_shape_t *child = SINGLE_CHILD(edges_table);

                if (child->edge_name == id) {

                if (rb_managed_id_table_lookup(edges_table, id, &lookup_result)) {

        if (!res && new_variations_allowed) {

            rb_shape_t *new_shape = rb_shape_alloc_new_child(id, shape, shape_type);

                new_edges = TAG_SINGLE_CHILD(new_shape);

                if (SINGLE_CHILD_P(edges_table)) {
                    rb_shape_t *old_child = SINGLE_CHILD(edges_table);
                    new_edges = rb_managed_id_table_new(2);
                    rb_managed_id_table_insert(new_edges, old_child->edge_name, (VALUE)old_child);

                    new_edges = rb_managed_id_table_dup(edges_table);

                rb_managed_id_table_insert(new_edges, new_shape->edge_name, (VALUE)new_shape);
                *variation_created = true;

get_next_shape_internal(rb_shape_t *shape, ID id, enum shape_type shape_type, bool *variation_created, bool new_variations_allowed)

    if (rb_multi_ractor_p()) {
        return get_next_shape_internal_atomic(shape, id, shape_type, variation_created, new_variations_allowed);

    *variation_created = false;

    VALUE edges_table = shape->edges;

        if (SINGLE_CHILD_P(edges_table)) {
            rb_shape_t *child = SINGLE_CHILD(edges_table);

            if (child->edge_name == id) {

            if (rb_managed_id_table_lookup(edges_table, id, &lookup_result)) {

        if (!new_variations_allowed || rb_shapes_count() > MAX_SHAPE_ID) {

        rb_shape_t *new_shape = rb_shape_alloc_new_child(id, shape, shape_type);

            shape->edges = TAG_SINGLE_CHILD(new_shape);

            if (SINGLE_CHILD_P(edges_table)) {
                rb_shape_t *old_child = SINGLE_CHILD(edges_table);
                VALUE new_edges = rb_managed_id_table_new(2);
                rb_managed_id_table_insert(new_edges, old_child->edge_name, (VALUE)old_child);

            rb_managed_id_table_insert(shape->edges, new_shape->edge_name, (VALUE)new_shape);
            *variation_created = true;
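/*
 * Both variants implement the same transition lookup: find (or create) the
 * child shape reached from `shape` by edge `id`. Edge storage starts
 * empty, becomes a tagged single-child pointer for the first edge, and is
 * promoted to a managed id table once a second edge appears, at which
 * point *variation_created reports that a new sibling branch was added.
 * New children are refused when new_variations_allowed is false or the
 * shape budget (MAX_SHAPE_ID) is spent; callers then fall back to
 * transition_complex(). The _atomic variant runs under multiple ractors
 * and reads shape->edges with RUBY_ATOMIC_VALUE_LOAD (publishing the
 * rebuilt table atomically in the parts not shown here) instead of plain
 * loads and stores.
 */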
    if (shape->parent_id == INVALID_SHAPE_ID) {

        *removed_shape = NULL;

        if (shape->type == SHAPE_IVAR && shape->edge_name == id) {
            *removed_shape = shape;

            return RSHAPE(shape->parent_id);

            rb_shape_t *new_parent = remove_shape_recursive(RSHAPE(shape->parent_id), id, removed_shape);

                rb_shape_t *new_child = get_next_shape_internal(new_parent, shape->edge_name, shape->type, &dont_care, true);
                RUBY_ASSERT(!new_child || new_child->capacity <= shape->capacity);
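/*
 * remove_shape_recursive removes one ivar edge by rebuilding the tail of
 * the transition chain: walk up until the shape that introduced `id` is
 * found (reported through *removed_shape), take its parent as the new
 * base, then replay every younger transition on top of it with
 * get_next_shape_internal. If a step cannot be replayed (no shapes left),
 * the caller falls back to a too-complex shape instead.
 */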
static inline shape_id_t transition_complex(shape_id_t shape_id);

shape_transition_object_id(shape_id_t original_shape_id)

    RUBY_ASSERT(!rb_shape_has_object_id(original_shape_id));

    rb_shape_t *shape = get_next_shape_internal(RSHAPE(original_shape_id), id_object_id, SHAPE_OBJ_ID, &dont_care, true);

        shape = RSHAPE(ROOT_SHAPE_WITH_OBJ_ID);

    return shape_id(shape, original_shape_id) | SHAPE_ID_FL_HAS_OBJECT_ID;

rb_shape_transition_object_id(VALUE obj)

    return shape_transition_object_id(RBASIC_SHAPE_ID(obj));

rb_shape_object_id(shape_id_t original_shape_id)

    RUBY_ASSERT(rb_shape_has_object_id(original_shape_id));

    rb_shape_t *shape = RSHAPE(original_shape_id);
    while (shape->type != SHAPE_OBJ_ID) {
        if (UNLIKELY(shape->parent_id == INVALID_SHAPE_ID)) {
            rb_bug("Missing object_id in shape tree");
        shape = RSHAPE(shape->parent_id);

    return shape_id(shape, original_shape_id) | SHAPE_ID_FL_HAS_OBJECT_ID;

static inline shape_id_t
transition_complex(shape_id_t shape_id)

    uint8_t heap_index = rb_shape_heap_index(shape_id);
    shape_id_t next_shape_id;

        next_shape_id = rb_shape_root(heap_index - 1) | SHAPE_ID_FL_TOO_COMPLEX;
        if (rb_shape_has_object_id(shape_id)) {
            next_shape_id = shape_transition_object_id(next_shape_id);

        if (rb_shape_has_object_id(shape_id)) {
            next_shape_id = ROOT_TOO_COMPLEX_WITH_OBJ_ID | (shape_id & SHAPE_ID_FLAGS_MASK);

            next_shape_id = ROOT_TOO_COMPLEX_SHAPE_ID | (shape_id & SHAPE_ID_FLAGS_MASK);

    RUBY_ASSERT(rb_shape_has_object_id(shape_id) == rb_shape_has_object_id(next_shape_id));

    return next_shape_id;
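/*
 * transition_complex computes the "too complex" shape id an object should
 * move to once its ivars are stored in a hash rather than an indexed
 * buffer. It keeps two pieces of information from the old id: the heap
 * index (so T_OBJECT slot sizing stays correct) and the has-object-id
 * flag, either by replaying the SHAPE_OBJ_ID transition on the complex
 * root or by selecting ROOT_TOO_COMPLEX_WITH_OBJ_ID directly. The final
 * assertion checks that the object-id flag survived the transition.
 */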
rb_shape_transition_remove_ivar(VALUE obj, ID id, shape_id_t *removed_shape_id)

    shape_id_t original_shape_id = RBASIC_SHAPE_ID(obj);

    RUBY_ASSERT(!rb_shape_too_complex_p(original_shape_id));

    rb_shape_t *new_shape = remove_shape_recursive(RSHAPE(original_shape_id), id, &removed_shape);

        *removed_shape_id = raw_shape_id(removed_shape);

        return shape_id(new_shape, original_shape_id);

    else if (removed_shape) {

        shape_id_t next_shape_id = transition_complex(original_shape_id);
        RUBY_ASSERT(rb_shape_has_object_id(next_shape_id) == rb_shape_has_object_id(original_shape_id));
        return next_shape_id;

    return original_shape_id;

rb_shape_transition_frozen(VALUE obj)

    shape_id_t shape_id = rb_obj_shape_id(obj);
    return shape_id | SHAPE_ID_FL_FROZEN;

rb_shape_transition_complex(VALUE obj)

    return transition_complex(RBASIC_SHAPE_ID(obj));

rb_shape_transition_heap(VALUE obj, size_t heap_index)

    return (RBASIC_SHAPE_ID(obj) & (~SHAPE_ID_HEAP_INDEX_MASK)) | rb_shape_root(heap_index);

rb_set_namespaced_class_shape_id(VALUE obj, shape_id_t shape_id)

    RBASIC_SET_SHAPE_ID(RCLASS_WRITABLE_ENSURE_FIELDS_OBJ(obj), shape_id);

    RBASIC_SET_SHAPE_ID(obj, shape_id);

    return get_next_shape_internal(shape, id, SHAPE_IVAR, &dont_care, true);

rb_shape_get_next_iv_shape(shape_id_t shape_id, ID id)

    rb_shape_t *next_shape = shape_get_next_iv_shape(shape, id);

        return INVALID_SHAPE_ID;

    return raw_shape_id(next_shape);

shape_get_iv_index(rb_shape_t *shape, ID id, attr_index_t *value)

    while (shape->parent_id != INVALID_SHAPE_ID) {
        if (shape->edge_name == id) {
            enum shape_type shape_type;
            shape_type = (enum shape_type)shape->type;

            switch (shape_type) {

                *value = shape->next_field_index - 1;

                rb_bug("Ivar should not exist on transition");

        shape = RSHAPE(shape->parent_id);

    if (shape_get_iv_index(shape, id, &index)) {
        rb_bug("rb_shape_get_next: trying to create ivar that already exists at index %u", index);

    if (IMEMO_TYPE_P(obj, imemo_fields)) {

    bool allow_new_shape = RCLASS_VARIATION_COUNT(klass) < SHAPE_MAX_VARIATIONS;
    bool variation_created = false;
    rb_shape_t *new_shape = get_next_shape_internal(shape, id, SHAPE_IVAR, &variation_created, allow_new_shape);

        if (obj != klass && new_shape->next_field_index > RCLASS_MAX_IV_COUNT(klass)) {
            RCLASS_SET_MAX_IV_COUNT(klass, new_shape->next_field_index);

        if (variation_created) {
            RCLASS_VARIATION_COUNT(klass)++;

                if (RCLASS_VARIATION_COUNT(klass) >= SHAPE_MAX_VARIATIONS) {

                        "The class %"PRIsVALUE" reached %d shape variations, instance variables accesses will be slower and memory usage increased.\n"
                        "It is recommended to define instance variables in a consistent order, for instance by eagerly defining them all in the #initialize method.",

rb_shape_transition_add_ivar(VALUE obj, ID id)

    shape_id_t original_shape_id = RBASIC_SHAPE_ID(obj);

    rb_shape_t *next_shape = shape_get_next(RSHAPE(original_shape_id), obj, id, true);

        return shape_id(next_shape, original_shape_id);

    return transition_complex(original_shape_id);

rb_shape_transition_add_ivar_no_warnings(VALUE obj, ID id)

    shape_id_t original_shape_id = RBASIC_SHAPE_ID(obj);

    rb_shape_t *next_shape = shape_get_next(RSHAPE(original_shape_id), obj, id, false);

        return shape_id(next_shape, original_shape_id);

    return transition_complex(original_shape_id);
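/*
 * Adding an ivar therefore has two outcomes: when shape_get_next can find
 * or allocate a SHAPE_IVAR child, the object gets a new indexed shape id;
 * when the class has already spent its SHAPE_MAX_VARIATIONS sibling
 * branches, or the shape pool is exhausted, the object transitions to a
 * too-complex shape and its ivars move to a hash. The
 * RB_WARN_CATEGORY_PERFORMANCE warning above is emitted when a class first
 * reaches the variation budget (that category is off by default), and the
 * _no_warnings variant appears to perform the same transition while
 * passing false where the warning-emitting path passes true.
 */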
rb_shape_get_iv_index_with_hint(shape_id_t shape_id, ID id, attr_index_t *value, shape_id_t *shape_id_hint)

    attr_index_t index_hint = *value;

    if (*shape_id_hint == INVALID_SHAPE_ID) {
        *shape_id_hint = shape_id;
        return rb_shape_get_iv_index(shape_id, id, value);

    rb_shape_t *shape_hint = RSHAPE(*shape_id_hint);

    if (shape->ancestor_index && shape->next_field_index >= ANCESTOR_CACHE_THRESHOLD) {
        depth = ANCESTOR_SEARCH_MAX_DEPTH;

    while (depth > 0 && shape->next_field_index > index_hint) {
        while (shape_hint->next_field_index > shape->next_field_index) {
            shape_hint = RSHAPE(shape_hint->parent_id);

        if (shape_hint == shape) {

            *shape_id_hint = raw_shape_id(shape);

        if (shape->edge_name == id) {

            *value = shape->next_field_index - 1;
            *shape_id_hint = raw_shape_id(shape);

        shape = RSHAPE(shape->parent_id);

    if (!shape->ancestor_index && initial_shape->ancestor_index) {
        shape = initial_shape;

    *shape_id_hint = shape_id;
    return shape_get_iv_index(shape, id, value);
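/*
 * The hint variant is used by inline caches: *shape_id_hint and *value
 * carry the shape and index observed on a previous lookup. The search only
 * walks a few parents (bounded by ANCESTOR_SEARCH_MAX_DEPTH) trying to
 * reconnect the current shape with the hinted one; if they meet, the
 * cached index is still valid. Otherwise it gives up and falls back to the
 * plain shape_get_iv_index walk, updating the hint for next time.
 */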
    if (shape->ancestor_index && shape->next_field_index >= ANCESTOR_CACHE_THRESHOLD) {

        *ivar_shape = redblack_value(node);

    while (shape->parent_id != INVALID_SHAPE_ID) {
        if (shape->edge_name == id) {

            *ivar_shape = shape;

        shape = RSHAPE(shape->parent_id);

rb_shape_find_ivar(shape_id_t current_shape_id, ID id, shape_id_t *ivar_shape_id)

    RUBY_ASSERT(!rb_shape_too_complex_p(current_shape_id));

    rb_shape_t *shape = RSHAPE(current_shape_id);

    if (!shape_cache_find_ivar(shape, id, &ivar_shape)) {

        if (shape->ancestor_index && shape->next_field_index >= ANCESTOR_CACHE_THRESHOLD) {

        if (!shape_find_ivar(shape, id, &ivar_shape)) {

    *ivar_shape_id = shape_id(ivar_shape, current_shape_id);

rb_shape_get_iv_index(shape_id_t shape_id, ID id, attr_index_t *value)

    shape_id_t ivar_shape_id;
    if (rb_shape_find_ivar(shape_id, id, &ivar_shape_id)) {
        *value = RSHAPE_INDEX(ivar_shape_id);

rb_shape_id_offset(void)

    return sizeof(uintptr_t) - SHAPE_ID_NUM_BITS / sizeof(uintptr_t);

    if (dest_shape->type != initial_shape->type) {
        midway_shape = shape_rebuild(initial_shape, RSHAPE(dest_shape->parent_id));
        if (UNLIKELY(!midway_shape)) {

        midway_shape = initial_shape;

    switch ((enum shape_type)dest_shape->type) {

        midway_shape = shape_get_next_iv_shape(midway_shape, dest_shape->edge_name);

    return midway_shape;

rb_shape_rebuild(shape_id_t initial_shape_id, shape_id_t dest_shape_id)

    RUBY_ASSERT(!rb_shape_too_complex_p(initial_shape_id));
    RUBY_ASSERT(!rb_shape_too_complex_p(dest_shape_id));

    rb_shape_t *next_shape = shape_rebuild(RSHAPE(initial_shape_id), RSHAPE(dest_shape_id));

        return shape_id(next_shape, initial_shape_id);

    return transition_complex(initial_shape_id | (dest_shape_id & SHAPE_ID_FL_HAS_OBJECT_ID));

rb_shape_copy_fields(VALUE dest, VALUE *dest_buf, shape_id_t dest_shape_id, VALUE *src_buf, shape_id_t src_shape_id)

    rb_shape_t *dest_shape = RSHAPE(dest_shape_id);
    rb_shape_t *src_shape = RSHAPE(src_shape_id);

    if (src_shape->next_field_index == dest_shape->next_field_index) {

        MEMCPY(dest_buf, src_buf, VALUE, dest_shape->next_field_index);

        for (uint32_t i = 0; i < dest_shape->next_field_index; i++) {

        while (src_shape->parent_id != INVALID_SHAPE_ID) {
            if (src_shape->type == SHAPE_IVAR) {
                while (dest_shape->edge_name != src_shape->edge_name) {
                    if (UNLIKELY(dest_shape->parent_id == INVALID_SHAPE_ID)) {
                        rb_bug("Lost field %s", rb_id2name(src_shape->edge_name));
                    dest_shape = RSHAPE(dest_shape->parent_id);

                RB_OBJ_WRITE(dest, &dest_buf[dest_shape->next_field_index - 1], src_buf[src_shape->next_field_index - 1]);

            src_shape = RSHAPE(src_shape->parent_id);
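/*
 * rb_shape_copy_fields handles the two layouts an object copy can meet:
 * when source and destination shapes hold the same number of fields the
 * buffers line up and a single MEMCPY suffices; otherwise each source
 * field is re-homed by walking the destination shape chain until the
 * matching edge_name is found, and written with RB_OBJ_WRITE so the
 * destination object gets the required GC write barrier.
 */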
rb_shape_copy_complex_ivars(VALUE dest, VALUE obj, shape_id_t src_shape_id, st_table *fields_table)

    st_table *table = st_copy(fields_table);
    if (rb_shape_has_object_id(src_shape_id)) {
        st_data_t id = (st_data_t)id_object_id;
        st_delete(table, &id, NULL);

    rb_obj_init_too_complex(dest, table);

rb_shape_edges_count(shape_id_t shape_id)

    if (SINGLE_CHILD_P(shape->edges)) {

    return rb_managed_id_table_size(shape->edges);

rb_shape_memsize(shape_id_t shape_id)

    if (shape->edges && !SINGLE_CHILD_P(shape->edges)) {
        memsize += rb_managed_id_table_size(shape->edges);

rb_shape_foreach_field(shape_id_t initial_shape_id, rb_shape_foreach_transition_callback func, void *data)

    RUBY_ASSERT(!rb_shape_too_complex_p(initial_shape_id));

    rb_shape_t *shape = RSHAPE(initial_shape_id);
    if (shape->type == SHAPE_ROOT) {

    shape_id_t parent_id = shape_id(RSHAPE(shape->parent_id), initial_shape_id);
    if (rb_shape_foreach_field(parent_id, func, data)) {
        switch (func(shape_id(shape, initial_shape_id), data)) {

            rb_bug("unreachable");

rb_shape_verify_consistency(VALUE obj, shape_id_t shape_id)

    if (shape_id == INVALID_SHAPE_ID) {
        rb_bug("Can't set INVALID_SHAPE_ID on an object");

    bool has_object_id = false;
    while (shape->parent_id != INVALID_SHAPE_ID) {
        if (shape->type == SHAPE_OBJ_ID) {
            has_object_id = true;

        shape = RSHAPE(shape->parent_id);

    if (rb_shape_has_object_id(shape_id)) {
        if (!has_object_id) {
            rb_bug("shape_id claim having obj_id but doesn't shape_id=%u, obj=%s", shape_id, rb_obj_info(obj));

        if (has_object_id) {
            rb_bug("shape_id claim not having obj_id but it does shape_id=%u, obj=%s", shape_id, rb_obj_info(obj));

    if (rb_shape_too_complex_p(shape_id)) {

    attr_index_t ivar_count = RSHAPE_LEN(shape_id);
    if (has_object_id) {

    RUBY_ASSERT(!(shape_id & SHAPE_ID_HAS_IVAR_MASK));

    uint8_t flags_heap_index = rb_shape_heap_index(shape_id);

        size_t shape_id_slot_size = rb_shape_tree.capacities[flags_heap_index - 1] * sizeof(VALUE) + sizeof(struct RBasic);
        size_t actual_slot_size = rb_gc_obj_slot_size(obj);

        if (shape_id_slot_size != actual_slot_size) {
            rb_bug("shape_id heap_index flags mismatch: shape_id_slot_size=%zu, gc_slot_size=%zu\n", shape_id_slot_size, actual_slot_size);

        if (flags_heap_index) {
            rb_bug("shape_id indicate heap_index > 0 but object is not T_OBJECT: %s", rb_obj_info(obj));
shape_too_complex(VALUE self)

    return RBOOL(rb_shape_too_complex_p(shape_id));

shape_frozen(VALUE self)

    return RBOOL(shape_id & SHAPE_ID_FL_FROZEN);

shape_has_object_id_p(VALUE self)

    return RBOOL(rb_shape_has_object_id(shape_id));

    if (is_instance_id(key)) {

shape_id_t_to_rb_cShape(shape_id_t shape_id)

        INT2NUM(shape_id & SHAPE_ID_OFFSET_MASK),
        rb_shape_edge_name(shape),
        INT2NUM(shape->next_field_index),
        INT2NUM(rb_shape_heap_index(shape_id)),
        INT2NUM(RSHAPE_CAPACITY(shape_id)));

static enum rb_id_table_iterator_result
rb_edges_to_hash(ID key, VALUE value, void *ref)

    rb_hash_aset(*(VALUE *)ref, parse_key(key), shape_id_t_to_rb_cShape(raw_shape_id((rb_shape_t *)value)));
    return ID_TABLE_CONTINUE;

rb_shape_edges(VALUE self)

    VALUE hash = rb_hash_new();

    if (SINGLE_CHILD_P(shape->edges)) {
        rb_shape_t *child = SINGLE_CHILD(shape->edges);
        rb_edges_to_hash(child->edge_name, (VALUE)child, &hash);

        VALUE edges = shape->edges;
        rb_managed_id_table_foreach(edges, rb_edges_to_hash, &hash);

    if (shape->edge_name) {
        if (is_instance_id(shape->edge_name)) {
            return ID2SYM(shape->edge_name);

        return INT2NUM(shape->capacity);

rb_shape_export_depth(VALUE self)

    return SIZET2NUM(rb_shape_depth(shape_id));

rb_shape_parent(VALUE self)

    if (shape->parent_id != INVALID_SHAPE_ID) {
        return shape_id_t_to_rb_cShape(shape->parent_id);

    return shape_id_t_to_rb_cShape(rb_obj_shape_id(obj));

rb_shape_root_shape(VALUE self)

    return shape_id_t_to_rb_cShape(ROOT_SHAPE_ID);

rb_shape_shapes_available(VALUE self)

    return INT2NUM(MAX_SHAPE_ID - (rb_shapes_count() - 1));

rb_shape_exhaust(int argc, VALUE *argv, VALUE self)

    int offset = argc == 1 ? NUM2INT(argv[0]) : 0;
    RUBY_ATOMIC_SET(rb_shape_tree.next_shape_id, MAX_SHAPE_ID - offset + 1);

static enum rb_id_table_iterator_result collect_keys_and_values(ID key, VALUE value, void *ref)

    rb_hash_aset(*(VALUE *)ref, parse_key(key), shape_to_h((rb_shape_t *)value));
    return ID_TABLE_CONTINUE;

    VALUE hash = rb_hash_new();

    if (SINGLE_CHILD_P(edges)) {
        collect_keys_and_values(child->edge_name, (VALUE)child, &hash);

        rb_managed_id_table_foreach(edges, collect_keys_and_values, &hash);

    rb_hash_aset(rb_shape, ID2SYM(rb_intern("edges")), edges(shape->edges));

    if (shape == rb_shape_get_root_shape()) {

    rb_hash_aset(rb_shape, ID2SYM(rb_intern("edge_name")), rb_id2str(shape->edge_name));

shape_transition_tree(VALUE self)

    return shape_to_h(rb_shape_get_root_shape());

    shape_id_t shape_id = NUM2UINT(id);
    if (shape_id >= rb_shapes_count()) {
        rb_raise(rb_eArgError, "Shape ID %d is out of bounds\n", shape_id);

    return shape_id_t_to_rb_cShape(shape_id);
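/*
 * The functions above back the debug-only Ruby-level shape introspection
 * API (compiled in when SHAPE_DEBUG is set). Judging from
 * shape_id_t_to_rb_cShape, each shape is exported as a small Ruby struct
 * carrying its id, edge name, field count, heap index and capacity, and
 * helpers like rb_shape_edges and shape_transition_tree expose the edge
 * tables as nested hashes for inspection from Ruby.
 */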
#include <sys/mman.h>

Init_default_shapes(void)

    size_t *heap_sizes = rb_gc_heap_sizes();
    size_t heaps_count = 0;
    while (heap_sizes[heaps_count]) {

    attr_index_t *capacities = ALLOC_N(attr_index_t, heaps_count + 1);
    capacities[heaps_count] = 0;

    for (index = 0; index < heaps_count; index++) {
        capacities[index] = (heap_sizes[index] - sizeof(struct RBasic)) / sizeof(VALUE);

    rb_shape_tree.capacities = capacities;

    rb_shape_tree.shape_list = (rb_shape_t *)mmap(NULL, shape_list_mmap_size,
                                                  PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    if (rb_shape_tree.shape_list == MAP_FAILED) {
        rb_shape_tree.shape_list = 0;

    ruby_annotate_mmap(rb_shape_tree.shape_list, shape_list_mmap_size, "Ruby:Init_default_shapes:shape_list");

    if (!rb_shape_tree.shape_list) {

    id_object_id = rb_make_internal_id();

    rb_shape_tree.shape_cache = (redblack_node_t *)mmap(NULL, shape_cache_mmap_size,
                                                        PROT_READ | PROT_WRITE, MAP_PRIVATE | MAP_ANONYMOUS, -1, 0);
    rb_shape_tree.cache_size = 0;

    if (rb_shape_tree.shape_cache == MAP_FAILED) {
        rb_shape_tree.shape_cache = 0;
        rb_shape_tree.cache_size = REDBLACK_CACHE_SIZE;

    ruby_annotate_mmap(rb_shape_tree.shape_cache, shape_cache_mmap_size, "Ruby:Init_default_shapes:shape_cache");

    rb_gc_register_address(&shape_tree_obj);

    rb_shape_t *root = rb_shape_alloc_with_parent_id(0, INVALID_SHAPE_ID);

    root->type = SHAPE_ROOT;
    rb_shape_tree.root_shape = root;
    RUBY_ASSERT(raw_shape_id(rb_shape_tree.root_shape) == ROOT_SHAPE_ID);
    RUBY_ASSERT(!(raw_shape_id(rb_shape_tree.root_shape) & SHAPE_ID_HAS_IVAR_MASK));

    rb_shape_t *root_with_obj_id = get_next_shape_internal(root, id_object_id, SHAPE_OBJ_ID, &dontcare, true);

    RUBY_ASSERT(raw_shape_id(root_with_obj_id) == ROOT_SHAPE_WITH_OBJ_ID);
    RUBY_ASSERT(root_with_obj_id->type == SHAPE_OBJ_ID);
    RUBY_ASSERT(root_with_obj_id->edge_name == id_object_id);
    RUBY_ASSERT(root_with_obj_id->next_field_index == 1);
    RUBY_ASSERT(!(raw_shape_id(root_with_obj_id) & SHAPE_ID_HAS_IVAR_MASK));
    (void)root_with_obj_id;
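/*
 * Init_default_shapes sets up everything the rest of this file relies on:
 * it derives the per-heap capacities table from the GC slot sizes
 * ((slot_size - sizeof(struct RBasic)) / sizeof(VALUE)), it mmaps the
 * fixed-size shape_list and red-black shape_cache so shape pointers and
 * shape ids stay stable for the life of the process (when the cache mmap
 * fails, cache_size is set to REDBLACK_CACHE_SIZE so ancestor caching is
 * effectively disabled), and it creates the two well-known shapes: the
 * root shape at ROOT_SHAPE_ID and the object_id root at
 * ROOT_SHAPE_WITH_OBJ_ID.
 */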
rb_shape_free_all(void)

    xfree((void *)rb_shape_tree.capacities);

    rb_define_const(rb_cShape, "SHAPE_ROOT", INT2NUM(SHAPE_ROOT));
    rb_define_const(rb_cShape, "SHAPE_IVAR", INT2NUM(SHAPE_IVAR));
    rb_define_const(rb_cShape, "SHAPE_ID_NUM_BITS", INT2NUM(SHAPE_ID_NUM_BITS));
    rb_define_const(rb_cShape, "SHAPE_FLAG_SHIFT", INT2NUM(SHAPE_FLAG_SHIFT));
    rb_define_const(rb_cShape, "SPECIAL_CONST_SHAPE_ID", INT2NUM(SPECIAL_CONST_SHAPE_ID));
    rb_define_const(rb_cShape, "SHAPE_MAX_VARIATIONS", INT2NUM(SHAPE_MAX_VARIATIONS));
    rb_define_const(rb_cShape, "SHAPE_BUFFER_SIZE", INT2NUM(sizeof(rb_shape_t) * SHAPE_BUFFER_SIZE));
    rb_define_const(rb_cShape, "REDBLACK_CACHE_SIZE", INT2NUM(sizeof(redblack_node_t) * REDBLACK_CACHE_SIZE));