5#include "internal/imemo.h"
6#include "internal/st.h"
7#include "vm_callinfo.h"
/* Forward declarations for iseq GC helpers defined elsewhere:
 * rb_iseq_memsize reports the heap bytes owned by an iseq;
 * rb_iseq_mark_and_move either marks the iseq's references or updates
 * them after compaction, selected by reference_updating. */
9size_t rb_iseq_memsize(
const rb_iseq_t *iseq);
10void rb_iseq_mark_and_move(
rb_iseq_t *iseq,
bool reference_updating);
/* rb_imemo_name: translate an imemo_type enumerator into its printable
 * name (e.g. imemo_callcache -> "callcache") via the IMEMO_NAME macro.
 * NOTE(review): this extract is fragmentary -- the return type and most
 * of the switch cases are missing from the visible text. */
14rb_imemo_name(
enum imemo_type
type)
/* One case per imemo type; each returns the stringified enumerator tail. */
18#define IMEMO_NAME(x) case imemo_##x: return #x;
20 IMEMO_NAME(callcache);
22 IMEMO_NAME(constcache);
29 IMEMO_NAME(parser_strterm);
31 IMEMO_NAME(throw_data);
/* Control reaches here only for an out-of-range enum value. */
36 rb_bug(
"unreachable");
/* rb_imemo_new: allocate a fresh T_IMEMO object of the given type.
 * v0 seeds the first VALUE slot; size is the byte size handed to
 * NEWOBJ_OF.  NOTE(review): the lines computing `flags` are missing
 * from this extract. */
44rb_imemo_new(
enum imemo_type
type,
VALUE v0,
size_t size)
/* NEWOBJ_OF performs the actual GC-heap allocation. */
47 NEWOBJ_OF(obj,
void, v0, flags, size, 0);
/* rb_imemo_tmpbuf_new: allocate an empty imemo_tmpbuf node.
 * NOTE(review): the body is not visible in this extract. */
53rb_imemo_tmpbuf_new(
void)
/* rb_alloc_tmp_buffer_with_count: allocate a `size`-byte temporary
 * buffer tracked by a tmpbuf imemo.  The imemo is stored into *store
 * before the xmalloc below, so the GC can find and keep it if the
 * allocation triggers a collection.  cnt is presumably the VALUE-slot
 * count recorded for conservative marking -- the lines storing it are
 * not visible in this extract. */
63rb_alloc_tmp_buffer_with_count(
volatile VALUE *store,
size_t size,
size_t cnt)
70 tmpbuf = rb_imemo_tmpbuf_new();
71 *store = (
VALUE)tmpbuf;
72 ptr = ruby_xmalloc(size);
/* rb_alloc_tmp_buffer: byte-length variant of the tmp-buffer
 * allocator.  Rejects negative lengths and lengths whose round-up to a
 * whole number of VALUE slots overflows, then delegates to
 * rb_alloc_tmp_buffer_with_count. */
80rb_alloc_tmp_buffer(
volatile VALUE *store,
long len)
/* roomof rounds len up to a count of VALUE-sized slots; a negative
 * result after the cast signals overflow. */
84 if (
len < 0 || (cnt = (
long)roomof(
len,
sizeof(
VALUE))) < 0) {
85 rb_raise(rb_eArgError,
"negative buffer size (or size too big)");
88 return rb_alloc_tmp_buffer_with_count(store,
len, cnt);
/* rb_free_tmp_buffer: release a buffer created by the allocators
 * above.  The payload pointer is detached with ATOMIC_PTR_EXCHANGE so
 * it is freed at most once.  NOTE(review): the lines defining `s`,
 * `tmpbuf` and `old_heap` are missing from this extract. */
92rb_free_tmp_buffer(
volatile VALUE *store)
96 void *ptr = ATOMIC_PTR_EXCHANGE(s->ptr, 0);
/* Chain this tmpbuf onto a list (surrounding context not visible). */
107 tmpbuf->next = old_heap;
/* imemo_fields_new: allocate an imemo_fields object able to hold
 * `capa` fields.  If the embedded layout fits an allocatable GC size
 * class, the fields live inline in the imemo; otherwise only the
 * struct header is allocated (the external buffer allocation is not
 * visible in this extract). */
114imemo_fields_new(
VALUE klass,
size_t capa)
117 if (rb_gc_size_allocatable_p(embedded_size)) {
118 VALUE fields = rb_imemo_new(imemo_fields, klass, embedded_size);
/* Too big to embed: allocate just the header-sized object. */
123 VALUE fields = rb_imemo_new(imemo_fields, klass,
sizeof(
struct rb_fields));
/* rb_imemo_fields_new: public wrapper over imemo_fields_new. */
131rb_imemo_fields_new(
VALUE klass,
size_t capa)
133 return imemo_fields_new(klass,
capa);
/* imemo_fields_new_complex: build a fields imemo backed by an st_table
 * (the "too complex" shape representation), pre-sized to capa entries.
 * NOTE(review): the allocation of `fields` itself is not visible in
 * this extract. */
137imemo_fields_new_complex(
VALUE klass,
size_t capa)
140 IMEMO_OBJ_FIELDS(fields)->as.complex.table = st_init_numtable_with_size(
capa);
/* rb_imemo_fields_new_complex: public wrapper over
 * imemo_fields_new_complex. */
145rb_imemo_fields_new_complex(
VALUE klass,
size_t capa)
147 return imemo_fields_new_complex(klass,
capa);
/* st_foreach callbacks used when adopting a table of fields: each one
 * fires a GC write barrier for a single stored VALUE.  NOTE(review):
 * both bodies are missing from this extract. */
151imemo_fields_trigger_wb_i(st_data_t key, st_data_t value, st_data_t arg)
159imemo_fields_complex_wb_i(st_data_t key, st_data_t value, st_data_t arg)
/* --- fragment: adopt `tbl` into a fields imemo and write-barrier
 * every value it holds.  The enclosing function header is not visible
 * in this extract. */
169 IMEMO_OBJ_FIELDS(fields)->as.complex.table = tbl;
170 st_foreach(tbl, imemo_fields_trigger_wb_i, (st_data_t)fields);
/* rb_imemo_fields_clone: deep-copy a fields imemo, preserving its
 * shape id.  Complex (st_table-backed) objects have their table
 * replicated and write-barriered; embedded objects get a MEMCPY of the
 * VALUE slots followed by per-slot processing (the loop body is not
 * visible in this extract). */
175rb_imemo_fields_clone(
VALUE fields_obj)
177 shape_id_t shape_id = RBASIC_SHAPE_ID(fields_obj);
180 if (rb_shape_too_complex_p(shape_id)) {
181 clone = rb_imemo_fields_new_complex(
CLASS_OF(fields_obj), 0);
182 RBASIC_SET_SHAPE_ID(clone, shape_id);
183 st_table *src_table = rb_imemo_fields_complex_tbl(fields_obj);
184 st_table *dest_table = rb_imemo_fields_complex_tbl(clone);
185 st_replace(dest_table, src_table);
/* Re-fire write barriers for every value copied into the clone. */
186 st_foreach(dest_table, imemo_fields_complex_wb_i, (st_data_t)clone);
189 clone = imemo_fields_new(
CLASS_OF(fields_obj), RSHAPE_CAPACITY(shape_id));
190 RBASIC_SET_SHAPE_ID(clone, shape_id);
191 VALUE *fields = rb_imemo_fields_ptr(clone);
192 attr_index_t fields_count = RSHAPE_LEN(shape_id);
193 MEMCPY(fields, rb_imemo_fields_ptr(fields_obj),
VALUE, fields_count);
194 for (attr_index_t i = 0; i < fields_count; i++) {
/* rb_imemo_fields_clear: reset a fields imemo to its empty shape --
 * the complex root shape when it is table-backed, the plain root shape
 * otherwise.  (Any freeing of old storage is not visible in this
 * extract.) */
203rb_imemo_fields_clear(
VALUE fields_obj)
207 if (rb_shape_obj_too_complex_p(fields_obj)) {
208 RBASIC_SET_SHAPE_ID(fields_obj, ROOT_TOO_COMPLEX_SHAPE_ID);
211 RBASIC_SET_SHAPE_ID(fields_obj, ROOT_SHAPE_ID);
/* rb_imemo_memsize: report the out-of-slot heap bytes owned by an
 * imemo, dispatched on its type.  NOTE(review): many case bodies are
 * missing from this extract; visible are iseq (delegates to
 * rb_iseq_memsize) and fields (st_memsize for complex shapes,
 * capacity * sizeof(VALUE) otherwise). */
220rb_imemo_memsize(
VALUE obj)
223 switch (imemo_type(obj)) {
/* imemo_ast was removed; reaching this case is a hard bug. */
225 rb_bug(
"imemo_ast is obsolete");
228 case imemo_callcache:
232 case imemo_constcache:
243 size += rb_iseq_memsize((
rb_iseq_t *)obj);
252 case imemo_parser_strterm:
256 case imemo_throw_data:
263 if (rb_shape_obj_too_complex_p(obj)) {
264 size += st_memsize(IMEMO_OBJ_FIELDS(obj)->as.complex.table);
267 size += RSHAPE_CAPACITY(RBASIC_SHAPE_ID(obj)) *
sizeof(
VALUE);
271 rb_bug(
"unreachable");
/* moved_or_living_object_strictly_p: compaction-time liveness
 * predicate.  NOTE(review): its body is not visible in this extract. */
282moved_or_living_object_strictly_p(
VALUE obj)
/* --- fragment of the method-entry (imemo_ment) mark/move logic; the
 * enclosing function header is not visible here.  Marks/updates the
 * entry's owner and defined_class, then dispatches on the method
 * definition type. */
292 rb_gc_mark_and_move(&ment->owner);
293 rb_gc_mark_and_move(&ment->defined_class);
297 case VM_METHOD_TYPE_ISEQ:
299 rb_gc_mark_and_move_ptr(&def->body.iseq.
iseqptr);
301 rb_gc_mark_and_move_ptr(&def->body.iseq.
cref);
303 if (!reference_updating) {
/* rb_gc_mark (not the _movable variant) pins the entry here --
 * presumably because overloaded-iseq entries must not move. */
304 if (def->iseq_overload && ment->defined_class) {
307 rb_gc_mark((
VALUE)ment);
311 case VM_METHOD_TYPE_ATTRSET:
312 case VM_METHOD_TYPE_IVAR:
313 rb_gc_mark_and_move(&def->body.attr.location);
315 case VM_METHOD_TYPE_BMETHOD:
316 rb_gc_mark_and_move(&def->body.bmethod.proc);
317 if (!reference_updating) {
318 if (def->body.bmethod.hooks) rb_hook_list_mark(def->body.bmethod.hooks);
321 case VM_METHOD_TYPE_ALIAS:
322 rb_gc_mark_and_move_ptr(&def->body.alias.original_me);
324 case VM_METHOD_TYPE_REFINED:
325 rb_gc_mark_and_move_ptr(&def->body.refined.orig_me);
/* The remaining method types carry no GC references to process. */
327 case VM_METHOD_TYPE_CFUNC:
328 case VM_METHOD_TYPE_ZSUPER:
329 case VM_METHOD_TYPE_MISSING:
330 case VM_METHOD_TYPE_OPTIMIZED:
331 case VM_METHOD_TYPE_UNDEF:
332 case VM_METHOD_TYPE_NOTIMPLEMENTED:
/* rb_imemo_mark_and_move: per-type GC walker for imemo objects.  When
 * reference_updating is false it marks references; when true it
 * rewrites them to their post-compaction locations.  NOTE(review):
 * this extract is fragmentary -- several case labels, braces and
 * breaks between the visible statements are missing. */
339rb_imemo_mark_and_move(
VALUE obj,
bool reference_updating)
341 switch (imemo_type(obj)) {
/* imemo_ast was removed; reaching this case is a hard bug. */
343 rb_bug(
"imemo_ast is obsolete");
346 case imemo_callcache: {
367 if (reference_updating) {
/* Keep the cache only if both its class and method entry survived
 * (possibly moved); otherwise invalidate it. */
372 if (moved_or_living_object_strictly_p(cc->klass) &&
373 moved_or_living_object_strictly_p((
VALUE)cc->cme_)) {
374 *((
VALUE *)&cc->klass) = rb_gc_location(cc->klass);
379 vm_cc_invalidate(cc);
/* super/refinement caches keep their cme and klass alive, movably. */
384 if (cc->klass && (vm_cc_super_p(cc) || vm_cc_refinement_p(cc))) {
385 rb_gc_mark_movable((
VALUE)cc->cme_);
386 rb_gc_mark_movable((
VALUE)cc->klass);
394 case imemo_constcache: {
397 rb_gc_mark_and_move(&ice->value);
/* cref: process its class, chain link and refinements. */
404 rb_gc_mark_and_move(&cref->klass_or_self);
405 rb_gc_mark_and_move_ptr(&cref->next);
406 rb_gc_mark_and_move(&cref->refinements);
/* env: process every local slot plus the owning iseq; after
 * compaction also rewrite ep's self-reference and restore the WB
 * flag on the frame. */
413 if (LIKELY(env->ep)) {
415 RUBY_ASSERT(rb_gc_location(env->ep[VM_ENV_DATA_INDEX_ENV]) == rb_gc_location(obj));
416 RUBY_ASSERT(reference_updating || VM_ENV_ESCAPED_P(env->ep));
418 for (
unsigned int i = 0; i < env->env_size; i++) {
419 rb_gc_mark_and_move((
VALUE *)&env->env[i]);
422 rb_gc_mark_and_move_ptr(&env->iseq);
424 if (reference_updating) {
425 ((
VALUE *)env->ep)[VM_ENV_DATA_INDEX_ENV] = rb_gc_location(env->ep[VM_ENV_DATA_INDEX_ENV]);
428 if (!VM_ENV_FLAGS(env->ep, VM_ENV_FLAG_WB_REQUIRED)) {
429 VM_ENV_FLAGS_SET(env->ep, VM_ENV_FLAG_WB_REQUIRED);
431 rb_gc_mark_movable( (
VALUE)rb_vm_env_prev_env(env));
/* ifunc: data may not be a VALUE, so mark it only speculatively. */
440 if (!reference_updating) {
441 rb_gc_mark_maybe((
VALUE)ifunc->data);
/* iseq: delegate to the iseq-specific walker. */
447 rb_iseq_mark_and_move((
rb_iseq_t *)obj, reference_updating);
450 struct MEMO *memo = (
struct MEMO *)obj;
452 rb_gc_mark_and_move((
VALUE *)&memo->v1);
453 rb_gc_mark_and_move((
VALUE *)&memo->v2);
/* u3 may hold a non-VALUE payload; mark "maybe" only. */
454 if (!reference_updating) {
455 rb_gc_mark_maybe(memo->u3.value);
463 case imemo_parser_strterm:
/* svar: the frame's special variables. */
469 rb_gc_mark_and_move((
VALUE *)&svar->lastline);
470 rb_gc_mark_and_move((
VALUE *)&svar->backref);
471 rb_gc_mark_and_move((
VALUE *)&svar->others);
475 case imemo_throw_data: {
478 rb_gc_mark_and_move((
VALUE *)&throw_data->throw_obj);
/* tmpbuf: conservatively mark each chained buffer's contents. */
485 if (!reference_updating) {
487 rb_gc_mark_locations(m->ptr, m->ptr + m->cnt);
488 }
while ((m = m->next) != NULL);
/* fields: complex shapes go through the st_table; embedded layouts
 * walk the VALUE slots directly. */
496 if (rb_shape_obj_too_complex_p(obj)) {
497 st_table *tbl = rb_imemo_fields_complex_tbl(obj);
498 if (reference_updating) {
499 rb_gc_ref_update_table_values_only(tbl);
502 rb_mark_tbl_no_pin(tbl);
506 VALUE *fields = rb_imemo_fields_ptr(obj);
507 attr_index_t
len = RSHAPE_LEN(RBASIC_SHAPE_ID(obj));
508 for (attr_index_t i = 0; i <
len; i++) {
509 rb_gc_mark_and_move(&fields[i]);
515 rb_bug(
"unreachable");
/* free_const_entry_i: rb_id_table iterator that frees one constant
 * entry (the freeing statement is not visible in this extract) and
 * always continues iteration. */
523static enum rb_id_table_iterator_result
524free_const_entry_i(
VALUE value,
void *data)
528 return ID_TABLE_CONTINUE;
/* --- fragment: free an entire constant table -- each entry, then the
 * table itself.  The enclosing function header is not visible here. */
534 rb_id_table_foreach_values(tbl, free_const_entry_i, 0);
535 rb_id_table_free(tbl);
/* --- fragment of vm_ccs_free: walk a call-cache set's entries,
 * invalidate each live callcache still keyed to `klass`, then free the
 * entry array.  NOTE(review): the function header and parts of the
 * loop body are missing from this extract. */
543 for (
int i=0; i<ccs->len; i++) {
/* Only touch caches that are real heap objects, not already garbage,
 * genuinely callcaches, and still belonging to this class. */
547 if (rb_gc_pointer_to_heap_p((
VALUE)cc) &&
548 !rb_objspace_garbage_object_p((
VALUE)cc) &&
549 IMEMO_TYPE_P(cc, imemo_callcache) &&
550 cc->klass == klass) {
558 VM_ASSERT(!vm_cc_super_p(cc) && !vm_cc_refinement_p(cc));
559 vm_cc_invalidate(cc);
561 ruby_xfree(ccs->entries);
/* --- fragment: a caller of vm_ccs_free (enclosing header missing);
 * Qundef as the class argument skips the klass check above. */
569 RB_DEBUG_COUNTER_INC(ccs_free);
570 vm_ccs_free(ccs,
true,
Qundef);
/* cc_tbl_free_i: rb_id_table iterator freeing one call-cache set;
 * `data` carries the owning class, forwarded to vm_ccs_free. */
573static enum rb_id_table_iterator_result
574cc_tbl_free_i(
VALUE ccs_ptr,
void *data)
578 VM_ASSERT(vm_ccs_p(ccs));
580 vm_ccs_free(ccs,
false, klass);
582 return ID_TABLE_CONTINUE;
/* --- fragment: free a whole cc table -- every set, then the table.
 * The enclosing function header is not visible in this extract. */
589 rb_id_table_foreach_values(cc_tbl, cc_tbl_free_i, (
void *)klass);
590 rb_id_table_free(cc_tbl);
/* imemo_fields_free: release the storage owned by a fields struct --
 * its st_table for complex shapes, otherwise its external VALUE
 * buffer.  (Embedded layouts presumably need no freeing -- the branch
 * structure is not fully visible in this extract.) */
594imemo_fields_free(
struct rb_fields *fields)
596 if (rb_shape_obj_too_complex_p((
VALUE)fields)) {
597 st_free_table(fields->as.complex.table);
600 xfree(fields->as.external.ptr);
/* rb_imemo_free: type-dispatched destructor run when the GC sweeps an
 * imemo.  Each case bumps a per-type debug counter; NOTE(review): the
 * freeing statements of several cases are missing from this extract. */
605rb_imemo_free(
VALUE obj)
607 switch (imemo_type(obj)) {
/* imemo_ast was removed; reaching this case is a hard bug. */
609 rb_bug(
"imemo_ast is obsolete");
612 case imemo_callcache:
613 RB_DEBUG_COUNTER_INC(obj_imemo_callcache);
616 case imemo_callinfo:{
/* kwarg storage is reference-counted; free only when unreferenced. */
621 if (ci->kwarg->references == 0)
xfree((
void *)ci->kwarg);
623 RB_DEBUG_COUNTER_INC(obj_imemo_callinfo);
627 case imemo_constcache:
628 RB_DEBUG_COUNTER_INC(obj_imemo_constcache);
632 RB_DEBUG_COUNTER_INC(obj_imemo_cref);
640 RB_DEBUG_COUNTER_INC(obj_imemo_env);
645 RB_DEBUG_COUNTER_INC(obj_imemo_ifunc);
649 RB_DEBUG_COUNTER_INC(obj_imemo_iseq);
653 RB_DEBUG_COUNTER_INC(obj_imemo_memo);
658 RB_DEBUG_COUNTER_INC(obj_imemo_ment);
661 case imemo_parser_strterm:
662 RB_DEBUG_COUNTER_INC(obj_imemo_parser_strterm);
666 RB_DEBUG_COUNTER_INC(obj_imemo_svar);
669 case imemo_throw_data:
670 RB_DEBUG_COUNTER_INC(obj_imemo_throw_data);
675 RB_DEBUG_COUNTER_INC(obj_imemo_tmpbuf);
679 imemo_fields_free(IMEMO_OBJ_FIELDS(obj));
680 RB_DEBUG_COUNTER_INC(obj_imemo_fields);
683 rb_bug(
"unreachable");
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define xfree
Old name of ruby_xfree.
#define Qundef
Old name of RUBY_Qundef.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define CLASS_OF
Old name of rb_class_of.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define FL_WB_PROTECTED
Old name of RUBY_FL_WB_PROTECTED.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define T_MOVED
Old name of RUBY_T_MOVED.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
int capa
Designed capacity of the buffer.
int len
Length of the buffer.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
VALUE type(ANYARGS)
ANYARGS-ed function type.
#define RBASIC(obj)
Convenient casting macro.
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
uintptr_t VALUE
Type that represents a Ruby object.