11 #include "ruby/internal/config.h"
15 #ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20 #include "debug_counter.h"
22 #include "internal/class.h"
23 #include "internal/compar.h"
24 #include "internal/hash.h"
25 #include "internal/numeric.h"
26 #include "internal/proc.h"
27 #include "internal/random.h"
28 #include "internal/variable.h"
29 #include "internal/struct.h"
34 #include "insns_info.inc"
40 int argc,
const VALUE *argv,
int priv);
50 ruby_vm_special_exception_copy(
VALUE exc)
53 rb_obj_copy_ivar(e, exc);
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
70 EC_JUMP_TAG(ec, TAG_RAISE);
73 NORETURN(
static void vm_stackoverflow(
void));
76 vm_stackoverflow(
void)
78 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
93 #ifdef USE_SIGALTSTACK
94 ec_stack_overflow(ec, TRUE);
96 ec_stack_overflow(ec, FALSE);
102 #if VM_CHECK_MODE > 0
104 callable_class_p(
VALUE klass)
106 #if VM_CHECK_MODE >= 2
107 if (!klass)
return FALSE;
135 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
137 if (callable_class_p(cme->defined_class)) {
147 vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
149 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
150 enum imemo_type cref_or_me_type = imemo_env;
153 cref_or_me_type = imemo_type(cref_or_me);
155 if (
type & VM_FRAME_FLAG_BMETHOD) {
159 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
160 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
162 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
163 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
167 if (cref_or_me_type != imemo_ment) {
168 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
172 if (req_cref && cref_or_me_type != imemo_cref) {
173 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
176 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
177 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
181 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
187 if (cref_or_me_type == imemo_ment) {
190 if (!callable_method_entry_p(me)) {
191 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
195 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
196 VM_ASSERT(iseq == NULL ||
198 RUBY_VM_NORMAL_ISEQ_P(iseq)
202 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
212 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
215 #define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
217 vm_check_frame_detail(type, req_block, req_me, req_cref, \
218 specval, cref_or_me, is_cframe, iseq); \
220 switch (given_magic) {
222 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
224 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
226 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
227 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
230 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
232 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
237 static VALUE vm_stack_canary;
238 static bool vm_stack_canary_was_born =
false;
245 unsigned int pos = 0;
246 while (pos < ISEQ_BODY(iseq)->iseq_size) {
247 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
248 unsigned int next_pos = pos + insn_len(opcode);
249 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
254 rb_bug(
"failed to find the previous insn");
263 if (! LIKELY(vm_stack_canary_was_born)) {
266 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
270 else if (! (iseq = GET_ISEQ())) {
273 else if (LIKELY(sp[0] != vm_stack_canary)) {
282 const VALUE *orig = rb_iseq_original_iseq(iseq);
283 const VALUE iseqw = rb_iseqw_new(iseq);
285 const char *stri = rb_str_to_cstr(inspection);
286 const VALUE disasm = rb_iseq_disasm(iseq);
287 const char *strd = rb_str_to_cstr(disasm);
288 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
289 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
290 const char *name = insn_name(insn);
296 "We are killing the stack canary set by %s, "
297 "at %s@pc=%"PRIdPTR
"\n"
298 "watch out the C stack trace.\n"
300 name, stri, pos, strd);
303 #define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
306 #define vm_check_canary(ec, sp)
307 #define vm_check_frame(a, b, c, d)
310 #if USE_DEBUG_COUNTER
312 vm_push_frame_debug_counter_inc(
319 RB_DEBUG_COUNTER_INC(frame_push);
321 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
322 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
323 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
326 RB_DEBUG_COUNTER_INC(frame_R2R);
329 RB_DEBUG_COUNTER_INC(frame_R2C);
334 RB_DEBUG_COUNTER_INC(frame_C2R);
337 RB_DEBUG_COUNTER_INC(frame_C2C);
342 switch (
type & VM_FRAME_MAGIC_MASK) {
343 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
344 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
345 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
346 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
347 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
348 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
349 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
350 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
351 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
357 #define vm_push_frame_debug_counter_inc(ec, cfp, t)
362 rb_vm_stack_canary(
void)
364 #if VM_CHECK_MODE > 0
365 return vm_stack_canary;
371 STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
372 STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
373 STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
389 vm_check_frame(
type, specval, cref_or_me, iseq);
390 VM_ASSERT(local_size >= 0);
393 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
394 vm_check_canary(ec, sp);
399 for (
int i=0; i < local_size; i++) {
416 #if VM_DEBUG_BP_CHECK
426 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
427 atomic_signal_fence(memory_order_seq_cst);
435 vm_push_frame_debug_counter_inc(ec, cfp,
type);
443 if (VMDEBUG == 2) SDR();
445 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
452 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
454 if (VMDEBUG == 2) SDR();
456 RUBY_VM_CHECK_INTS(ec);
457 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
459 return flags & VM_FRAME_FLAG_FINISH;
465 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
472 VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
474 rb_imemo_tmpbuf_set_ptr(tmpbuf,
ptr);
478 dmy_iseq->body = dmy_body;
479 dmy_body->type = ISEQ_TYPE_TOP;
480 dmy_body->location.pathobj = fname;
484 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
486 VM_BLOCK_HANDLER_NONE,
498 rb_arity_error_new(
int argc,
int min,
int max)
500 VALUE err_mess =
rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
515 rb_error_arity(
int argc,
int min,
int max)
522 NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
525 vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
528 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
529 VM_FORCE_WRITE(&ep[index], v);
530 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
531 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
536 vm_env_write(
const VALUE *ep,
int index,
VALUE v)
538 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
539 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
540 VM_STACK_ENV_WRITE(ep, index, v);
543 vm_env_write_slowpath(ep, index, v);
548 rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
550 vm_env_write(ep, index, v);
556 if (block_handler == VM_BLOCK_HANDLER_NONE) {
560 switch (vm_block_handler_type(block_handler)) {
561 case block_handler_type_iseq:
562 case block_handler_type_ifunc:
563 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
564 case block_handler_type_symbol:
565 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
566 case block_handler_type_proc:
567 return VM_BH_TO_PROC(block_handler);
569 VM_UNREACHABLE(rb_vm_bh_to_procval);
576 #if VM_CHECK_MODE > 0
578 vm_svar_valid_p(
VALUE svar)
581 switch (imemo_type(svar)) {
590 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
600 if (lep && (ec == NULL || ec->root_lep != lep)) {
601 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
604 svar = ec->root_svar;
607 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
615 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
617 if (lep && (ec == NULL || ec->root_lep != lep)) {
618 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
621 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->
self, &ec->root_svar, svar);
628 const struct vm_svar *svar = lep_svar(ec, lep);
633 case VM_SVAR_LASTLINE:
634 return svar->lastline;
635 case VM_SVAR_BACKREF:
636 return svar->backref;
638 const VALUE ary = svar->others;
653 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
664 struct vm_svar *svar = lep_svar(ec, lep);
667 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
671 case VM_SVAR_LASTLINE:
674 case VM_SVAR_BACKREF:
678 VALUE ary = svar->others;
694 val = lep_svar_get(ec, lep, key);
697 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
714 rb_bug(
"unexpected back-ref");
727 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
737 return rb_reg_last_defined(backref);
739 rb_bug(
"unexpected back-ref");
743 nth = (int)(
type >> 1);
750 check_method_entry(
VALUE obj,
int can_be_svar)
752 if (obj ==
Qfalse)
return NULL;
754 #if VM_CHECK_MODE > 0
758 switch (imemo_type(obj)) {
768 #if VM_CHECK_MODE > 0
769 rb_bug(
"check_method_entry: svar should not be there:");
778 const VALUE *ep = cfp->ep;
781 while (!VM_ENV_LOCAL_P(ep)) {
782 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
783 ep = VM_ENV_PREV_EP(ep);
786 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
792 switch (me->def->type) {
793 case VM_METHOD_TYPE_ISEQ:
794 return me->def->body.iseq.
iseqptr;
803 switch (me->def->type) {
804 case VM_METHOD_TYPE_ISEQ:
805 return me->def->body.iseq.
cref;
811 #if VM_CHECK_MODE == 0
815 check_cref(
VALUE obj,
int can_be_svar)
817 if (obj ==
Qfalse)
return NULL;
819 #if VM_CHECK_MODE > 0
823 switch (imemo_type(obj)) {
833 #if VM_CHECK_MODE > 0
834 rb_bug(
"check_method_entry: svar should not be there:");
841 vm_env_cref(
const VALUE *ep)
845 while (!VM_ENV_LOCAL_P(ep)) {
846 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
847 ep = VM_ENV_PREV_EP(ep);
850 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
854 is_cref(
const VALUE v,
int can_be_svar)
857 switch (imemo_type(v)) {
870 vm_env_cref_by_cref(
const VALUE *ep)
872 while (!VM_ENV_LOCAL_P(ep)) {
873 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
874 ep = VM_ENV_PREV_EP(ep);
876 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
880 cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
882 const VALUE v = *vptr;
886 switch (imemo_type(v)) {
889 new_cref = vm_cref_dup(cref);
894 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
899 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
903 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
912 vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
914 if (vm_env_cref_by_cref(ep)) {
918 while (!VM_ENV_LOCAL_P(ep)) {
919 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
920 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
923 ep = VM_ENV_PREV_EP(ep);
925 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
926 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
929 rb_bug(
"vm_cref_dup: unreachable");
934 vm_get_cref(
const VALUE *ep)
942 rb_bug(
"vm_get_cref: unreachable");
947 rb_vm_get_cref(
const VALUE *ep)
949 return vm_get_cref(ep);
960 return vm_get_cref(cfp->ep);
964 vm_get_const_key_cref(
const VALUE *ep)
970 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
971 RCLASS_EXT(CREF_CLASS(cref))->cloned) {
974 cref = CREF_NEXT(cref);
987 if (CREF_CLASS(cref) == old_klass) {
988 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
989 *new_cref_ptr = new_cref;
992 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
993 cref = CREF_NEXT(cref);
994 *new_cref_ptr = new_cref;
995 new_cref_ptr = &new_cref->next;
997 *new_cref_ptr = NULL;
1006 prev_cref = vm_env_cref(ep);
1012 prev_cref = vm_env_cref(cfp->ep);
1016 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1020 vm_get_cbase(
const VALUE *ep)
1022 const rb_cref_t *cref = vm_get_cref(ep);
1024 return CREF_CLASS_FOR_DEFINITION(cref);
1028 vm_get_const_base(
const VALUE *ep)
1030 const rb_cref_t *cref = vm_get_cref(ep);
1033 if (!CREF_PUSHED_BY_EVAL(cref)) {
1034 return CREF_CLASS_FOR_DEFINITION(cref);
1036 cref = CREF_NEXT(cref);
1043 vm_check_if_namespace(
VALUE klass)
1051 vm_ensure_not_refinement_module(
VALUE self)
1054 rb_warn(
"not defined at the refinement, but at the outer class/module");
1070 if (
NIL_P(orig_klass) && allow_nil) {
1072 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1076 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1077 root_cref = CREF_NEXT(root_cref);
1080 while (cref && CREF_NEXT(cref)) {
1081 if (CREF_PUSHED_BY_EVAL(cref)) {
1085 klass = CREF_CLASS(cref);
1087 cref = CREF_NEXT(cref);
1089 if (!
NIL_P(klass)) {
1093 if ((ce = rb_const_lookup(klass,
id))) {
1094 rb_const_warn_if_deprecated(ce, klass,
id);
1097 if (am == klass)
break;
1099 if (is_defined)
return 1;
1100 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1102 goto search_continue;
1109 if (UNLIKELY(!rb_ractor_main_p())) {
1112 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass),
rb_id2name(
id));
1123 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1124 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1138 vm_check_if_namespace(orig_klass);
1140 return rb_public_const_defined_from(orig_klass,
id);
1143 return rb_public_const_get_from(orig_klass,
id);
1151 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1159 int allow_nil = TRUE;
1160 if (segments[0] == idNULL) {
1165 while (segments[idx]) {
1166 ID id = segments[idx++];
1167 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1180 rb_bug(
"vm_get_cvar_base: no cref");
1183 while (CREF_NEXT(cref) &&
1184 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1185 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1186 cref = CREF_NEXT(cref);
1188 if (top_level_raise && !CREF_NEXT(cref)) {
1192 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1200 ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1202 fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1205 vm_cc_attr_index_set(cc, index, shape_id);
1208 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1212 #define ractor_incidental_shareable_p(cond, val) \
1213 (!(cond) || rb_ractor_shareable_p(val))
1214 #define ractor_object_incidental_shareable_p(obj, val) \
1215 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1217 #define ATTR_INDEX_NOT_SET (attr_index_t)-1
1225 shape_id_t shape_id;
1229 return default_value;
1232 #if SHAPE_IN_BASIC_FLAGS
1233 shape_id = RBASIC_SHAPE_ID(obj);
1241 #if !SHAPE_IN_BASIC_FLAGS
1242 shape_id = ROBJECT_SHAPE_ID(obj);
1248 if (UNLIKELY(!rb_ractor_main_p())) {
1256 if (default_value ==
Qundef) {
1264 ivar_list = RCLASS_IVPTR(obj);
1266 #if !SHAPE_IN_BASIC_FLAGS
1267 shape_id = RCLASS_SHAPE_ID(obj);
1275 rb_gen_ivtbl_get(obj,
id, &ivtbl);
1276 #if !SHAPE_IN_BASIC_FLAGS
1277 shape_id = ivtbl->shape_id;
1279 ivar_list = ivtbl->as.shape.ivptr;
1282 return default_value;
1286 shape_id_t cached_id;
1290 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1293 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1296 if (LIKELY(cached_id == shape_id)) {
1297 RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1299 if (index == ATTR_INDEX_NOT_SET) {
1300 return default_value;
1303 val = ivar_list[index];
1304 #if USE_DEBUG_COUNTER
1305 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1308 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1314 #if USE_DEBUG_COUNTER
1316 if (cached_id != INVALID_SHAPE_ID) {
1317 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1320 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1324 if (cached_id != INVALID_SHAPE_ID) {
1325 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1328 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1331 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1334 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1338 if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
1343 table = (
st_table *)RCLASS_IVPTR(obj);
1347 table = ROBJECT_IV_HASH(obj);
1352 if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
1353 table = ivtbl->as.complex.table;
1359 if (!table || !st_lookup(table,
id, &val)) {
1360 val = default_value;
1364 shape_id_t previous_cached_id = cached_id;
1365 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1368 if (cached_id != previous_cached_id) {
1369 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1372 if (index == ATTR_INDEX_NOT_SET) {
1373 val = default_value;
1377 val = ivar_list[index];
1383 vm_cc_attr_index_initialize(cc, shape_id);
1386 vm_ic_attr_index_initialize(ic, shape_id);
1389 val = default_value;
1395 if (!UNDEF_P(default_value)) {
1403 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1414 populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1416 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1420 vm_cc_attr_index_set(cc, index, next_shape_id);
1423 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1435 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1438 rb_check_frozen(obj);
1440 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1442 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1444 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1445 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1448 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1458 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1464 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1467 NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1469 vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1471 #if SHAPE_IN_BASIC_FLAGS
1472 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1474 shape_id_t shape_id = rb_generic_shape_id(obj);
1480 if (shape_id == dest_shape_id) {
1481 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1483 else if (dest_shape_id != INVALID_SHAPE_ID) {
1484 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1485 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1487 if (shape_id == dest_shape->parent_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1498 rb_gen_ivtbl_get(obj, 0, &ivtbl);
1500 if (shape_id != dest_shape_id) {
1501 #if SHAPE_IN_BASIC_FLAGS
1502 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1504 ivtbl->shape_id = dest_shape_id;
1510 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1516 vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1524 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1525 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1527 if (LIKELY(shape_id == dest_shape_id)) {
1528 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1531 else if (dest_shape_id != INVALID_SHAPE_ID) {
1532 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1533 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1534 shape_id_t source_shape_id = dest_shape->parent_id;
1536 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1537 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1539 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1541 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id),
id) == dest_shape);
1557 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1558 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1564 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1576 VALUE defined_class = 0;
1580 defined_class =
RBASIC(defined_class)->klass;
1583 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1585 rb_bug(
"the cvc table should be set");
1589 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1590 rb_bug(
"should have cvar cache entry");
1595 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1611 cref = vm_get_cref(GET_EP());
1613 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1614 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1616 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1622 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1624 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1630 return vm_getclassvariable(iseq, cfp,
id, ic);
1637 cref = vm_get_cref(GET_EP());
1639 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1640 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1642 rb_class_ivar_set(ic->entry->class_value,
id, val);
1646 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1650 update_classvariable_cache(iseq, klass,
id, cref, ic);
1656 vm_setclassvariable(iseq, cfp,
id, val, ic);
1662 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1673 shape_id_t dest_shape_id;
1675 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1677 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1684 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1688 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1695 vm_setinstancevariable(iseq, obj,
id, val, ic);
1704 ec->tag->state = RUBY_TAG_FATAL;
1707 ec->tag->state = TAG_THROW;
1709 else if (THROW_DATA_P(err)) {
1710 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1713 ec->tag->state = TAG_RAISE;
1720 const int flag,
const VALUE throwobj)
1728 else if (state == TAG_BREAK) {
1730 const VALUE *ep = GET_EP();
1731 const rb_iseq_t *base_iseq = GET_ISEQ();
1732 escape_cfp = reg_cfp;
1734 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1735 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1736 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1737 ep = escape_cfp->ep;
1738 base_iseq = escape_cfp->iseq;
1741 ep = VM_ENV_PREV_EP(ep);
1742 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1743 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1744 VM_ASSERT(escape_cfp->iseq == base_iseq);
1748 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1754 ep = VM_ENV_PREV_EP(ep);
1756 while (escape_cfp < eocfp) {
1757 if (escape_cfp->ep == ep) {
1758 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1759 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1764 for (i=0; i < ct->size; i++) {
1766 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1768 if (entry->type == CATCH_TYPE_BREAK &&
1769 entry->iseq == base_iseq &&
1770 entry->start < epc && entry->end >= epc) {
1771 if (entry->cont == epc) {
1780 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1785 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1788 else if (state == TAG_RETRY) {
1789 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1791 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1793 else if (state == TAG_RETURN) {
1794 const VALUE *current_ep = GET_EP();
1795 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1796 int in_class_frame = 0;
1798 escape_cfp = reg_cfp;
1801 while (!VM_ENV_LOCAL_P(ep)) {
1802 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1805 ep = VM_ENV_PREV_EP(ep);
1809 while (escape_cfp < eocfp) {
1810 const VALUE *lep = VM_CF_LEP(escape_cfp);
1816 if (lep == target_lep &&
1817 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1818 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1823 if (lep == target_lep) {
1824 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1826 if (in_class_frame) {
1831 const VALUE *tep = current_ep;
1833 while (target_lep != tep) {
1834 if (escape_cfp->ep == tep) {
1836 if (tep == target_ep) {
1840 goto unexpected_return;
1843 tep = VM_ENV_PREV_EP(tep);
1847 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1848 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1850 case ISEQ_TYPE_MAIN:
1852 if (in_class_frame)
goto unexpected_return;
1853 if (target_ep == NULL) {
1857 goto unexpected_return;
1861 case ISEQ_TYPE_EVAL: {
1863 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1864 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1865 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1866 t = ISEQ_BODY(is)->type;
1868 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1871 case ISEQ_TYPE_CLASS:
1880 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1881 if (target_ep == NULL) {
1885 goto unexpected_return;
1889 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1892 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1898 rb_bug(
"isns(throw): unsupported throw type");
1901 ec->tag->state = state;
1902 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1907 rb_num_t throw_state,
VALUE throwobj)
1909 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1910 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1913 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1916 return vm_throw_continue(ec, throwobj);
1923 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1929 int is_splat = flag & 0x01;
1932 const VALUE obj = ary;
1944 if (num + is_splat == 0) {
1947 else if (flag & 0x02) {
1952 for (i = 0; i < num -
len; i++) {
1957 for (j = 0; i < num; i++, j++) {
1979 for (; i < num -
len; i++) {
1983 for (rb_num_t j = 0; i < num; i++, j++) {
1984 *cfp->sp++ =
ptr[
len - j - 1];
1988 for (rb_num_t j = 0; j < num; j++) {
1989 *cfp->sp++ =
ptr[num - j - 1];
2005 #if VM_CHECK_MODE > 0
2006 ccs->debug_sig = ~(
VALUE)ccs;
2012 ccs->entries = NULL;
2014 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2022 if (! vm_cc_markable(cc)) {
2026 if (UNLIKELY(ccs->len == ccs->capa)) {
2027 if (ccs->capa == 0) {
2029 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
2033 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2036 VM_ASSERT(ccs->len < ccs->capa);
2038 const int pos = ccs->len++;
2039 ccs->entries[pos].argc = vm_ci_argc(ci);
2040 ccs->entries[pos].flag = vm_ci_flag(ci);
2043 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2049 #if VM_CHECK_MODE > 0
2053 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2054 for (
int i=0; i<ccs->len; i++) {
2055 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2056 ccs->entries[i].flag,
2057 ccs->entries[i].argc);
2058 rp(ccs->entries[i].cc);
2065 VM_ASSERT(vm_ccs_p(ccs));
2066 VM_ASSERT(ccs->len <= ccs->capa);
2068 for (
int i=0; i<ccs->len; i++) {
2071 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2072 VM_ASSERT(vm_cc_class_check(cc, klass));
2073 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2074 VM_ASSERT(!vm_cc_super_p(cc));
2075 VM_ASSERT(!vm_cc_refinement_p(cc));
2086 const ID mid = vm_ci_mid(ci);
2087 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
2094 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2096 const int ccs_len = ccs->len;
2098 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2099 rb_vm_ccs_free(ccs);
2100 rb_id_table_delete(cc_tbl, mid);
2104 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2109 unsigned int argc = vm_ci_argc(ci);
2110 unsigned int flag = vm_ci_flag(ci);
2112 for (
int i=0; i<ccs_len; i++) {
2113 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2114 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2115 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2117 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2119 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2120 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2122 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2123 VM_ASSERT(ccs_cc->klass == klass);
2124 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2133 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2136 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2142 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2144 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2147 cme = rb_callable_method_entry(klass, mid);
2150 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2154 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2155 return &vm_empty_cc;
2158 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2163 VM_ASSERT(cc_tbl != NULL);
2165 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2171 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2175 cme = rb_check_overloaded_cme(cme, ci);
2177 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2178 vm_ccs_push(klass, ccs, ci, cc);
2180 VM_ASSERT(vm_cc_cme(cc) != NULL);
2181 VM_ASSERT(cme->called_id == mid);
2182 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2196 cc = vm_search_cc(klass, ci);
2199 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2200 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2201 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2202 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2203 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2213 #if USE_DEBUG_COUNTER
2217 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2219 #if OPT_INLINE_METHOD_CACHE
2223 if (cd_owner && cc != empty_cc) {
2227 #if USE_DEBUG_COUNTER
2228 if (!old_cc || old_cc == empty_cc) {
2230 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2232 else if (old_cc == cc) {
2233 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2235 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2236 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2238 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2239 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2240 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2243 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2248 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2249 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2260 #if OPT_INLINE_METHOD_CACHE
2261 if (LIKELY(vm_cc_class_check(cc, klass))) {
2262 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2263 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2264 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2265 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2266 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2267 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2271 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2274 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2278 return vm_search_method_slowpath0(cd_owner, cd, klass);
2285 VM_ASSERT(klass !=
Qfalse);
2288 return vm_search_method_fastpath(cd_owner, cd, klass);
2291 #if __has_attribute(transparent_union)
2304 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2305 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2306 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2307 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2308 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2309 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2312 # define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2315 # define make_cfunc_type(f) (cfunc_type)(f)
2325 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2326 VM_ASSERT(callable_method_entry_p(me));
2328 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2332 #if __has_attribute(transparent_union)
2333 return me->def->body.cfunc.func == func.anyargs;
2335 return me->def->body.cfunc.func == func;
2344 VM_ASSERT(iseq != NULL);
2346 return check_cfunc(vm_cc_cme(cc), func);
2349 #define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2350 #define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2352 #define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2384 opt_equality_specialized(
VALUE recv,
VALUE obj)
2386 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2387 goto compare_by_identity;
2389 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2390 goto compare_by_identity;
2393 goto compare_by_identity;
2402 #if MSC_VERSION_BEFORE(1300)
2406 else if (isnan(b)) {
2411 return RBOOL(a == b);
2418 return rb_str_eql_internal(obj, recv);
2423 compare_by_identity:
2424 return RBOOL(recv == obj);
2430 VM_ASSERT(cd_owner != NULL);
2432 VALUE val = opt_equality_specialized(recv, obj);
2433 if (!UNDEF_P(val))
return val;
2435 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2439 return RBOOL(recv == obj);
2443 #undef EQ_UNREDEFINED_P
2446 NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2449 opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2451 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2453 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2454 return RBOOL(recv == obj);
2464 VALUE val = opt_equality_specialized(recv, obj);
2465 if (!UNDEF_P(val)) {
2469 return opt_equality_by_mid_slowpath(recv, obj, mid);
2476 return opt_equality_by_mid(obj1, obj2, idEq);
2482 return opt_equality_by_mid(obj1, obj2, idEqlP);
2492 case VM_CHECKMATCH_TYPE_WHEN:
2494 case VM_CHECKMATCH_TYPE_RESCUE:
2499 case VM_CHECKMATCH_TYPE_CASE: {
2500 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2503 rb_bug(
"check_match: unreachable");
2508 #if MSC_VERSION_BEFORE(1300)
2509 #define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2511 #define CHECK_CMP_NAN(a, b)
2515 double_cmp_lt(
double a,
double b)
2517 CHECK_CMP_NAN(a, b);
2518 return RBOOL(a < b);
2522 double_cmp_le(
double a,
double b)
2524 CHECK_CMP_NAN(a, b);
2525 return RBOOL(a <= b);
2529 double_cmp_gt(
double a,
double b)
2531 CHECK_CMP_NAN(a, b);
2532 return RBOOL(a > b);
2536 double_cmp_ge(
double a,
double b)
2538 CHECK_CMP_NAN(a, b);
2539 return RBOOL(a >= b);
2543 static inline VALUE *
2548 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2549 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2551 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2552 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2553 int params = ISEQ_BODY(cfp->iseq)->param.size;
2556 bp += vm_ci_argc(ci);
2559 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2563 #if VM_DEBUG_BP_CHECK
2564 if (bp != cfp->bp_check) {
2565 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2566 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2567 (
long)(bp - GET_EC()->vm_stack));
2568 rb_bug(
"vm_base_ptr: unreachable");
2581 return vm_base_ptr(cfp);
2586 #include "vm_args.c"
2596 static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2601 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2603 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2609 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2612 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2613 int param = ISEQ_BODY(iseq)->param.size;
2614 int local = ISEQ_BODY(iseq)->local_table_size;
2615 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2621 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2622 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2623 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2624 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2625 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2626 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2627 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2628 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2632 rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2634 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2635 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2636 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2637 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2638 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2639 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2640 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2641 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2645 rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2647 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2648 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2649 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2650 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2651 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2652 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2653 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2656 #define ALLOW_HEAP_ARGV (-2)
2657 #define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2662 vm_check_canary(GET_EC(), cfp->sp);
2668 int argc = calling->argc;
2670 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2674 VALUE *argv = cfp->sp - argc;
2678 cfp->sp -= argc - 1;
2679 cfp->sp[-1] = argv_ary;
2681 calling->heap_argv = argv_ary;
2687 if (max_args >= 0 &&
len + argc > max_args) {
2695 calling->argc +=
len - (max_args - argc + 1);
2696 len = max_args - argc + 1;
2705 calling->heap_argv = 0;
2707 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2709 for (i = 0; i <
len; i++) {
2710 *cfp->sp++ =
ptr[i];
2722 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2723 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2724 const VALUE h = rb_hash_new_with_size(kw_len);
2725 VALUE *sp = cfp->sp;
2728 for (i=0; i<kw_len; i++) {
2733 cfp->sp -= kw_len - 1;
2734 calling->argc -= kw_len - 1;
2735 calling->kw_splat = 1;
2739 vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2742 if (keyword_hash !=
Qnil) {
2744 keyword_hash =
rb_hash_dup(rb_to_hash_type(keyword_hash));
2747 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2755 return keyword_hash;
2761 const struct rb_callinfo *restrict ci,
int max_args)
2763 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2764 if (IS_ARGS_KW_SPLAT(ci)) {
2766 VM_ASSERT(calling->kw_splat == 1);
2770 VALUE ary = cfp->sp[0];
2771 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2774 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2778 if (UNLIKELY(calling->heap_argv)) {
2780 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2781 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2782 calling->kw_splat = 0;
2790 VM_ASSERT(calling->kw_splat == 1);
2794 calling->kw_splat = 0;
2799 VM_ASSERT(calling->kw_splat == 0);
2803 VALUE ary = cfp->sp[0];
2805 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2810 VALUE last_hash, argv_ary;
2811 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2812 if (!IS_ARGS_KEYWORD(ci) &&
2815 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2820 calling->kw_splat = 1;
2826 if (!IS_ARGS_KEYWORD(ci) &&
2827 calling->argc > 0 &&
2829 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2837 calling->kw_splat = 1;
2843 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2845 VM_ASSERT(calling->kw_splat == 1);
2846 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2851 calling->kw_splat = 0;
2857 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2859 VM_ASSERT(calling->kw_splat == 0);
2865 vm_caller_setup_arg_kw(cfp, calling, ci);
2869 #define USE_OPT_HIST 0
2872 #define OPT_HIST_MAX 64
2873 static int opt_hist[OPT_HIST_MAX+1];
2877 opt_hist_show_results_at_exit(
void)
2879 for (
int i=0; i<OPT_HIST_MAX; i++) {
2880 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2890 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2891 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2892 const int opt = calling->argc - lead_num;
2893 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2894 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2895 const int param = ISEQ_BODY(iseq)->param.size;
2896 const int local = ISEQ_BODY(iseq)->local_table_size;
2897 const int delta = opt_num - opt;
2899 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2902 if (opt_pc < OPT_HIST_MAX) {
2906 opt_hist[OPT_HIST_MAX]++;
2910 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2918 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2919 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2920 const int opt = calling->argc - lead_num;
2921 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2923 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2926 if (opt_pc < OPT_HIST_MAX) {
2930 opt_hist[OPT_HIST_MAX]++;
2934 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2939 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2940 VALUE *
const locals);
2947 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2948 int param_size = ISEQ_BODY(iseq)->param.size;
2949 int local_size = ISEQ_BODY(iseq)->local_table_size;
2952 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
2954 local_size = local_size + vm_ci_argc(calling->cd->ci);
2955 param_size = param_size + vm_ci_argc(calling->cd->ci);
2957 cfp->sp[0] = (
VALUE)calling->cd->ci;
2959 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
2969 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2970 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2972 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2973 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2975 const int ci_kw_len = kw_arg->keyword_len;
2976 const VALUE *
const ci_keywords = kw_arg->keywords;
2977 VALUE *argv = cfp->sp - calling->argc;
2978 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2979 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2981 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2982 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2984 int param = ISEQ_BODY(iseq)->param.size;
2985 int local = ISEQ_BODY(iseq)->local_table_size;
2986 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2993 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2996 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2997 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2999 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3000 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3001 VALUE *
const argv = cfp->sp - calling->argc;
3002 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3005 for (i=0; i<kw_param->num; i++) {
3006 klocals[i] = kw_param->default_values[i];
3013 int param = ISEQ_BODY(iseq)->param.size;
3014 int local = ISEQ_BODY(iseq)->local_table_size;
3015 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3025 cfp->sp -= (calling->argc + 1);
3026 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3027 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3036 st_table *dup_check_table = vm->unused_block_warning_table;
3046 .v = (
VALUE)cme->def,
3050 if (!strict_unused_block) {
3051 key = (st_data_t)cme->def->original_id;
3053 if (st_lookup(dup_check_table, key, NULL)) {
3063 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3068 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3069 fprintf(stderr,
"key:%p\n", (
void *)key);
3073 if (st_insert(dup_check_table, key, 1)) {
3078 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3080 if (!
NIL_P(m_loc)) {
3081 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3085 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3092 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3097 VM_ASSERT((vm_ci_argc(ci), 1));
3098 VM_ASSERT(vm_cc_cme(cc) != NULL);
3100 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3101 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3102 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3103 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3106 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3107 if (LIKELY(rb_simple_iseq_p(iseq))) {
3109 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3110 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3112 if (calling->argc != lead_num) {
3113 argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
3117 VM_ASSERT(cc == calling->cc);
3119 if (vm_call_iseq_optimizable_p(ci, cc)) {
3120 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3122 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3123 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3124 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3127 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3132 else if (rb_iseq_only_optparam_p(iseq)) {
3135 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3136 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3138 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3139 const int argc = calling->argc;
3140 const int opt = argc - lead_num;
3142 if (opt < 0 || opt > opt_num) {
3143 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
3146 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3147 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3148 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3149 vm_call_cacheable(ci, cc));
3152 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3153 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3154 vm_call_cacheable(ci, cc));
3158 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3159 for (
int i=argc; i<lead_num + opt_num; i++) {
3162 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3164 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3165 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3166 const int argc = calling->argc;
3167 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3169 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3172 if (argc - kw_arg->keyword_len == lead_num) {
3173 const int ci_kw_len = kw_arg->keyword_len;
3174 const VALUE *
const ci_keywords = kw_arg->keywords;
3176 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3178 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3179 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
3181 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3182 vm_call_cacheable(ci, cc));
3187 else if (argc == lead_num) {
3189 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3190 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
3192 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3194 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3195 vm_call_cacheable(ci, cc));
3221 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3222 bool can_fastpath =
true;
3224 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3226 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3227 ci = vm_ci_new_runtime(
3233 ci = forward_cd->caller_ci;
3235 can_fastpath =
false;
3239 if (!vm_ci_markable(ci)) {
3240 ci = vm_ci_new_runtime(
3245 can_fastpath =
false;
3247 argv[param_size - 1] = (
VALUE)ci;
3248 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3252 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3279 const VALUE * lep = VM_CF_LEP(cfp);
3285 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3290 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3294 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3296 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3297 VALUE * to = cfp->sp - 1;
3301 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3306 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3308 cfp->sp = to + argc;
3327 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3330 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3331 int param_size = ISEQ_BODY(iseq)->param.size;
3332 int local_size = ISEQ_BODY(iseq)->local_table_size;
3334 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3336 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3337 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3343 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3346 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3347 int param_size = ISEQ_BODY(iseq)->param.size;
3348 int local_size = ISEQ_BODY(iseq)->local_table_size;
3350 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3353 local_size = local_size + vm_ci_argc(calling->cd->ci);
3354 param_size = param_size + vm_ci_argc(calling->cd->ci);
3356 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3357 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3362 int opt_pc,
int param_size,
int local_size)
3367 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3368 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3371 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3377 int opt_pc,
int param_size,
int local_size)
3379 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3380 VALUE *argv = cfp->sp - calling->argc;
3381 VALUE *sp = argv + param_size;
3382 cfp->sp = argv - 1 ;
3384 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3385 calling->block_handler, (
VALUE)me,
3386 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3387 local_size - param_size,
3388 ISEQ_BODY(iseq)->stack_max);
3397 VALUE *argv = cfp->sp - calling->argc;
3399 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3400 VALUE *src_argv = argv;
3401 VALUE *sp_orig, *sp;
3402 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3404 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3405 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3406 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3407 dst_captured->code.val = src_captured->code.val;
3408 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3409 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3412 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3416 vm_pop_frame(ec, cfp, cfp->ep);
3419 sp_orig = sp = cfp->sp;
3422 sp[0] = calling->recv;
3426 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3427 *sp++ = src_argv[i];
3430 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3431 calling->recv, calling->block_handler, (
VALUE)me,
3432 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3433 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3434 ISEQ_BODY(iseq)->stack_max);
3442 ractor_unsafe_check(
void)
3444 if (!rb_ractor_main_p()) {
3445 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3452 ractor_unsafe_check();
3460 ractor_unsafe_check();
3462 return (*f)(argc, argv, recv);
3468 ractor_unsafe_check();
3476 ractor_unsafe_check();
3478 return (*f)(recv, argv[0]);
3484 ractor_unsafe_check();
3486 return (*f)(recv, argv[0], argv[1]);
3492 ractor_unsafe_check();
3494 return (*f)(recv, argv[0], argv[1], argv[2]);
3500 ractor_unsafe_check();
3502 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3508 ractor_unsafe_check();
3509 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3510 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3516 ractor_unsafe_check();
3517 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3518 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3524 ractor_unsafe_check();
3525 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3526 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3532 ractor_unsafe_check();
3533 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3534 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3540 ractor_unsafe_check();
3541 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3542 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3548 ractor_unsafe_check();
3549 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3550 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3556 ractor_unsafe_check();
3557 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3558 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3564 ractor_unsafe_check();
3565 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3566 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3572 ractor_unsafe_check();
3573 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3574 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3580 ractor_unsafe_check();
3581 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3582 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3588 ractor_unsafe_check();
3589 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3590 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3604 return (*f)(argc, argv, recv);
3618 return (*f)(recv, argv[0]);
3625 return (*f)(recv, argv[0], argv[1]);
3632 return (*f)(recv, argv[0], argv[1], argv[2]);
3639 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3645 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3646 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3652 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3653 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3659 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3660 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3666 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3667 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3673 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3674 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3680 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3681 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3687 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3688 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3694 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3695 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3701 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3702 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3708 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3709 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3715 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3716 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
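/*
 * Illustration only (not part of this file): the invokers above recover the
 * exact arity-specific signature before the call, because invoking a function
 * pointer through a mismatched type is undefined behavior in C.  A minimal
 * stand-alone sketch with hypothetical names (my_value_t, call_fixed_arity):
 */
#if 0
#include <assert.h>

typedef unsigned long my_value_t;          /* stand-in for VALUE */
typedef my_value_t (*any_func_t)(void);    /* type-erased function pointer */

static my_value_t
call_fixed_arity(my_value_t recv, int argc, const my_value_t *argv, any_func_t func)
{
    switch (argc) {
      case 0: return ((my_value_t (*)(my_value_t))func)(recv);
      case 1: return ((my_value_t (*)(my_value_t, my_value_t))func)(recv, argv[0]);
      case 2: return ((my_value_t (*)(my_value_t, my_value_t, my_value_t))func)(recv, argv[0], argv[1]);
      default: assert(!"unsupported arity"); return 0;
    }
}
#endif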
3722 const int ov_flags = RAISED_STACKOVERFLOW;
3723 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3724 if (rb_ec_raised_p(ec, ov_flags)) {
3725 rb_ec_raised_reset(ec, ov_flags);
3731 #define CHECK_CFP_CONSISTENCY(func) \
3732 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3733 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3739 #if VM_DEBUG_VERIFY_METHOD_CACHE
3740 switch (me->def->type) {
3741 case VM_METHOD_TYPE_CFUNC:
3742 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3744 # define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3746 METHOD_BUG(ATTRSET);
3748 METHOD_BUG(BMETHOD);
3751 METHOD_BUG(OPTIMIZED);
3752 METHOD_BUG(MISSING);
3753 METHOD_BUG(REFINED);
3757 rb_bug("wrong method type: %d", me->def->type);
3760 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3767 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3774 VALUE recv = calling->recv;
3775 VALUE block_handler = calling->block_handler;
3776 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3778 if (UNLIKELY(calling->kw_splat)) {
3779 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3782 VM_ASSERT(reg_cfp == ec->cfp);
3784 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3787 vm_push_frame(ec, NULL, frame_type, recv,
3788 block_handler, (VALUE)me,
3789 0, ec->cfp->sp, 0, 0);
3791 int len = cfunc->argc;
3794 reg_cfp->sp = stack_bottom;
3795 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3797 CHECK_CFP_CONSISTENCY("vm_call_cfunc");
3799 rb_vm_pop_frame(ec);
3801 VM_ASSERT(ec->cfp->sp == stack_bottom);
3803 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3804 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3814 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3816 VALUE *sp = ec->cfp->sp;
3817 VALUE recv = *(sp - recv_idx - 1);
3818 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3819 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3820 #if VM_CHECK_MODE > 0
3822 *(GET_EC()->cfp->sp) = Qfalse;
3824 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
3829 rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
3831 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3837 int argc = calling->argc;
3838 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3839 VALUE *argv = &stack_bottom[1];
3841 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3848 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3850 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3852 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3853 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3856 VALUE *stack_bottom = reg_cfp->sp - 2;
3858 VM_ASSERT(calling->argc == 1);
3862 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3865 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3867 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3874 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3877 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3878 return vm_call_cfunc_other(ec, reg_cfp, calling);
3882 calling->kw_splat = 0;
3884 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3885 VALUE *sp = stack_bottom;
3886 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3887 for(i = 0; i < argc; i++) {
3892 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3898 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3899 VALUE argv_ary = reg_cfp->sp[-1];
3903 int argc_offset = 0;
3905 if (UNLIKELY(argc > 0 &&
3907 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3909 return vm_call_cfunc_other(ec, reg_cfp, calling);
3913 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3919 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3920 VALUE keyword_hash = reg_cfp->sp[-1];
3923 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3926 return vm_call_cfunc_other(ec, reg_cfp, calling);
3933 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3935 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3936 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3938 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3939 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3941 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3943 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3944 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3948 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3949 return vm_call_cfunc_other(ec, reg_cfp, calling);
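/*
 * Sketch (hypothetical names, not the CRuby API): CC_SET_FASTPATH above
 * memoizes a specialized handler in the call cache, so later calls at the
 * same site skip the splat/kwsplat analysis and jump straight through the
 * cached function pointer.
 */
#if 0
typedef unsigned long val_t;

struct mini_cc { val_t (*call)(struct mini_cc *cc, val_t recv); };

static val_t
fast_handler(struct mini_cc *cc, val_t recv)
{
    return recv;                     /* specialized, analysis-free path */
}

static val_t
generic_handler(struct mini_cc *cc, val_t recv)
{
    cc->call = fast_handler;         /* memoize the specialization */
    return fast_handler(cc, recv);   /* and use it immediately     */
}
#endif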
3956 RB_DEBUG_COUNTER_INC(ccf_ivar);
3958 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
3965 RB_DEBUG_COUNTER_INC(ccf_attrset);
3966 VALUE val = *(cfp->sp - 1);
3968 attr_index_t index = vm_cc_attr_index(cc);
3969 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3970 ID id = vm_cc_cme(cc)->def->body.attr.id;
3971 rb_check_frozen(obj);
3972 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
3981 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
3982 if (!UNDEF_P(res)) {
3987 res = vm_setivar_slowpath_attr(obj, id, val, cc);
3995 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
4005 VALUE procv = cme->def->body.bmethod.proc;
4008 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4013 GetProcPtr(procv, proc);
4014 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4024 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4028 VALUE procv = cme->def->body.bmethod.proc;
4031 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4036 GetProcPtr(procv, proc);
4037 const struct rb_block *block = &proc->block;
4039 while (vm_block_type(block) == block_type_proc) {
4040 block = vm_proc_block(block->as.proc);
4042 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4045 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4046 VALUE *const argv = cfp->sp - calling->argc;
4047 const int arg_size = ISEQ_BODY(iseq)->param.size;
4050 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4051 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4054 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4059 vm_push_frame(ec, iseq,
4060 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4062 VM_GUARDED_PREV_EP(captured->ep),
4064 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4066 ISEQ_BODY(iseq)->local_table_size - arg_size,
4067 ISEQ_BODY(iseq)->stack_max);
4075 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4079 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4080 if (UNLIKELY(calling->heap_argv)) {
4085 argc = calling->argc;
4088 cfp->sp += - argc - 1;
4091 return vm_call_bmethod_body(ec, calling, argv);
4097 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4101 VALUE procv = cme->def->body.bmethod.proc;
4103 GetProcPtr(procv, proc);
4104 const struct rb_block *block = &proc->block;
4106 while (vm_block_type(block) == block_type_proc) {
4107 block = vm_proc_block(block->as.proc);
4109 if (vm_block_type(block) == block_type_iseq) {
4110 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4111 return vm_call_iseq_bmethod(ec, cfp, calling);
4114 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4115 return vm_call_noniseq_bmethod(ec, cfp, calling);
4119 rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
4121 VALUE klass = current_class;
4129 while (RTEST(klass)) {
4131 if (owner == target_owner) {
4137 return current_class;
4146 if (orig_me->defined_class == 0) {
4147 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4148 VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
4149 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4151 if (me->def->reference_count == 1) {
4152 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4156 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4164 VM_ASSERT(callable_method_entry_p(cme));
4171 return aliased_callable_method_entry(me);
4177 calling->cc = &VM_CC_ON_STACK(Qundef,
4180 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4182 return vm_call_method_each_type(ec, cfp, calling);
4185 static enum method_missing_reason
4188 enum method_missing_reason stat = MISSING_NOENTRY;
4189 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4190 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4191 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4201 ASSUME(calling->argc >= 0);
4203 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4204 int argc = calling->argc;
4205 VALUE recv = calling->recv;
4208 flags |= VM_CALL_OPT_SEND;
4210 if (UNLIKELY(! mid)) {
4211 mid = idMethodMissing;
4212 missing_reason = ci_missing_reason(ci);
4213 ec->method_missing_reason = missing_reason;
4216 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4221 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4222 VALUE exc = rb_make_no_method_exception(
4244 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4247 argc = ++calling->argc;
4252 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4253 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4254 VALUE exc = rb_make_no_method_exception(
4267 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4273 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4274 calling->cd = &new_fcd.cd;
4278 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4279 new_fcd.caller_ci = caller_ci;
4282 calling->cc = &VM_CC_ON_STACK(klass,
4284 { .method_missing_reason = missing_reason },
4285 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4287 if (flags & VM_CALL_FCALL) {
4288 return vm_call_method(ec, reg_cfp, calling);
4292 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4294 if (vm_cc_cme(cc) != NULL) {
4295 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4296 case METHOD_VISI_PUBLIC:
4297 return vm_call_method_each_type(ec, reg_cfp, calling);
4298 case METHOD_VISI_PRIVATE:
4299 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4301 case METHOD_VISI_PROTECTED:
4302 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4305 VM_UNREACHABLE(vm_call_method);
4307 return vm_call_method_missing(ec, reg_cfp, calling);
4310 return vm_call_method_nome(ec, reg_cfp, calling);
4320 i = calling->argc - 1;
4322 if (calling->argc == 0) {
4347 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4353 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4355 int flags = VM_CALL_FCALL;
4359 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4360 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4362 flags |= VM_CALL_ARGS_SPLAT;
4363 if (calling->kw_splat) {
4364 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4365 ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4366 calling->kw_splat = 0;
4368 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4371 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4372 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4378 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4379 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4385 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4388 int flags = vm_ci_flag(ci);
4390 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4391 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4392 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4393 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4394 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4395 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4398 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4399 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4404 const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
4406 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4408 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4409 unsigned int argc, flag;
4411 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4412 argc = ++calling->argc;
4415 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4416 vm_check_canary(ec, reg_cfp->sp);
4420 argv[0] = ID2SYM(vm_ci_mid(orig_ci));
4423 ec->method_missing_reason = reason;
4427 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4433 if (!(flag & VM_CALL_FORWARDING)) {
4434 calling->cd = &new_fcd.cd;
4438 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4439 new_fcd.caller_ci = caller_ci;
4443 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
4444 rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
4445 return vm_call_method(ec, reg_cfp, calling);
4451 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4462 return vm_call_method_nome(ec, cfp, calling);
4464 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4465 cme->def->body.refined.orig_me) {
4466 cme = refined_method_callable_without_refinement(cme);
4469 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
4471 return vm_call_method_each_type(ec, cfp, calling);
4475 find_refinement(VALUE refinements, VALUE klass)
4477 if (NIL_P(refinements)) {
4489 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4490 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4493 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4494 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4498 } while (cfp->iseq != local_iseq);
4509 if (orig_me->defined_class == 0) {
4517 VM_ASSERT(callable_method_entry_p(cme));
4519 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4529 ID mid = vm_ci_mid(calling->cd->ci);
4530 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4534 for (; cref; cref = CREF_NEXT(cref)) {
4535 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4536 if (NIL_P(refinement)) continue;
4539 rb_callable_method_entry(refinement, mid);
4542 if (vm_cc_call(cc) == vm_call_super_method) {
4545 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4550 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4551 cme->def != ref_me->def) {
4554 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4563 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4564 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4579 if (calling->cd->cc) {
4580 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4582 return vm_call_method(ec, cfp, calling);
4585 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
4586 calling->cc = ref_cc;
4587 return vm_call_method(ec, cfp, calling);
4591 return vm_call_method_nome(ec, cfp, calling);
4597 NOINLINE(static VALUE
4605 int argc = calling->argc;
4608 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
4611 return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
4617 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4620 VALUE procval = calling->recv;
4621 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4627 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4629 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4632 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4633 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4636 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4637 calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
4638 return vm_call_general(ec, reg_cfp, calling);
4645 VALUE recv = calling->recv;
4648 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4649 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4651 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4652 return internal_RSTRUCT_GET(recv, off);
4658 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4660 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4668 VALUE recv = calling->recv;
4671 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4672 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4674 rb_check_frozen(recv);
4676 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4677 internal_RSTRUCT_SET(recv, off, val);
4685 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4687 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4695 #define VM_CALL_METHOD_ATTR(var, func, nohook) \
4696 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4697 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4698 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4700 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4701 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4712 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4713 case OPTIMIZED_METHOD_TYPE_SEND:
4714 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4715 return vm_call_opt_send(ec, cfp, calling);
4716 case OPTIMIZED_METHOD_TYPE_CALL:
4717 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4718 return vm_call_opt_call(ec, cfp, calling);
4719 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4720 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4721 return vm_call_opt_block_call(ec, cfp, calling);
4722 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4723 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4727 VM_CALL_METHOD_ATTR(v,
4728 vm_call_opt_struct_aref(ec, cfp, calling),
4729 set_vm_cc_ivar(cc); \
4730 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4733 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4734 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4738 VM_CALL_METHOD_ATTR(v,
4739 vm_call_opt_struct_aset(ec, cfp, calling),
4740 set_vm_cc_ivar(cc); \
4741 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4745 rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4757 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4759 switch (cme->def->type) {
4760 case VM_METHOD_TYPE_ISEQ:
4761 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4762 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4763 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4766 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4767 return vm_call_iseq_setup(ec, cfp, calling);
4770 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4771 case VM_METHOD_TYPE_CFUNC:
4772 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4773 return vm_call_cfunc(ec, cfp, calling);
4775 case VM_METHOD_TYPE_ATTRSET:
4776 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4780 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4782 if (vm_cc_markable(cc)) {
4783 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4784 VM_CALL_METHOD_ATTR(v,
4785 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4786 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4792 VM_CALLCACHE_UNMARKABLE |
4793 VM_CALLCACHE_ON_STACK,
4799 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
4804 VM_CALL_METHOD_ATTR(v,
4805 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4806 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4810 case VM_METHOD_TYPE_IVAR:
4811 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4813 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4814 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4815 VM_CALL_METHOD_ATTR(v,
4816 vm_call_ivar(ec, cfp, calling),
4817 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4820 case VM_METHOD_TYPE_MISSING:
4821 vm_cc_method_missing_reason_set(cc, 0);
4822 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4823 return vm_call_method_missing(ec, cfp, calling);
4825 case VM_METHOD_TYPE_BMETHOD:
4826 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4827 return vm_call_bmethod(ec, cfp, calling);
4829 case VM_METHOD_TYPE_ALIAS:
4830 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4831 return vm_call_alias(ec, cfp, calling);
4833 case VM_METHOD_TYPE_OPTIMIZED:
4834 return vm_call_optimized(ec, cfp, calling, ci, cc);
4836 case VM_METHOD_TYPE_UNDEF:
4839 case VM_METHOD_TYPE_ZSUPER:
4840 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4842 case VM_METHOD_TYPE_REFINED:
4845 return vm_call_refined(ec, cfp, calling);
4848 rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4858 const int stat = ci_missing_reason(ci);
4860 if (vm_ci_mid(ci) == idMethodMissing) {
4861 if (UNLIKELY(calling->heap_argv)) {
4866 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4867 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4871 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4883 VALUE defined_class = me->defined_class;
4884 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4885 return NIL_P(refined_class) ? defined_class : refined_class;
4894 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4896 if (vm_cc_cme(cc) != NULL) {
4897 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4898 case METHOD_VISI_PUBLIC:
4899 return vm_call_method_each_type(ec, cfp, calling);
4901 case METHOD_VISI_PRIVATE:
4902 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4903 enum method_missing_reason stat = MISSING_PRIVATE;
4904 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4906 vm_cc_method_missing_reason_set(cc, stat);
4907 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4908 return vm_call_method_missing(ec, cfp, calling);
4910 return vm_call_method_each_type(ec, cfp, calling);
4912 case METHOD_VISI_PROTECTED:
4913 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4914 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4916 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4917 return vm_call_method_missing(ec, cfp, calling);
4921 VM_ASSERT(vm_cc_cme(cc) != NULL);
4924 calling->cc = &cc_on_stack;
4925 return vm_call_method_each_type(ec, cfp, calling);
4928 return vm_call_method_each_type(ec, cfp, calling);
4935 return vm_call_method_nome(ec, cfp, calling);
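/*
 * Condensed (illustrative, not the actual API) view of the visibility rules
 * vm_call_method implements above: public always dispatches; private needs a
 * receiver-less call (FCALL); protected needs `self` to be an instance of the
 * method's defined class, otherwise method_missing is raised.
 */
#if 0
enum mini_visi { MINI_PUBLIC, MINI_PRIVATE, MINI_PROTECTED };

static int
mini_call_allowed(enum mini_visi v, int fcall_p, int self_is_kind_of_defined_class_p)
{
    switch (v) {
      case MINI_PUBLIC:    return 1;
      case MINI_PRIVATE:   return fcall_p;
      case MINI_PROTECTED: return self_is_kind_of_defined_class_p;
    }
    return 0;
}
#endif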
4942 RB_DEBUG_COUNTER_INC(ccf_general);
4943 return vm_call_method(ec, reg_cfp, calling);
4949 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4950 VM_ASSERT(cc != vm_cc_empty());
4952 *(vm_call_handler *)&cc->call_ = vm_call_general;
4958 RB_DEBUG_COUNTER_INC(ccf_super_method);
4963 if (ec == NULL) rb_bug("unreachable");
4966 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4967 return vm_call_method(ec, reg_cfp, calling);
4973 vm_search_normal_superclass(VALUE klass)
4978 klass = RBASIC(klass)->klass;
4980 klass = RCLASS_ORIGIN(klass);
4984 NORETURN(static void vm_super_outside(void));
4987 vm_super_outside(void)
4993 empty_cc_for_super(void)
4995 return &vm_empty_cc_for_super;
5001 VALUE current_defined_class;
5008 current_defined_class = vm_defined_class_for_protected_call(me);
5011 reg_cfp->iseq != method_entry_iseqptr(me) &&
5014 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5018 "self has wrong type to call super in this context: "
5019 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5024 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5026 "implicit argument passing of super from method defined"
5027 " by define_method() is not supported."
5028 " Specify all arguments explicitly.");
5031 ID mid = me->def->original_id;
5033 if (!vm_ci_markable(cd->ci)) {
5034 VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
5038 cd->ci = vm_ci_new_runtime(mid,
5041 vm_ci_kwarg(cd->ci));
5048 VALUE klass = vm_search_normal_superclass(me->defined_class);
5052 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5056 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
5060 if (cached_cme == NULL) {
5062 cd->cc = empty_cc_for_super();
5064 else if (cached_cme->called_id != mid) {
5067 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5071 cd->cc = cc = empty_cc_for_super();
5075 switch (cached_cme->def->type) {
5077 case VM_METHOD_TYPE_REFINED:
5079 case VM_METHOD_TYPE_ATTRSET:
5080 case VM_METHOD_TYPE_IVAR:
5081 vm_cc_call_set(cc, vm_call_super_method);
5089 VM_ASSERT((vm_cc_cme(cc), true));
5097 block_proc_is_lambda(const VALUE procval)
5102 GetProcPtr(procval, proc);
5103 return proc->is_lambda;
5113 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
5116 int is_lambda = FALSE;
5117 VALUE val, arg, blockarg;
5119 const struct vm_ifunc *ifunc = captured->code.ifunc;
5124 else if (argc == 0) {
5131 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5133 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5135 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5138 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
5141 VM_GUARDED_PREV_EP(captured->ep),
5143 0, ec->cfp->sp, 0, 0);
5144 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
5145 rb_vm_pop_frame(ec);
5153 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5159 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5168 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5170 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5178 vm_callee_setup_block_arg_arg0_check(VALUE *argv)
5180 VALUE ary, arg0 = argv[0];
5185 VM_ASSERT(argv[0] == arg0);
5193 if (rb_simple_iseq_p(iseq)) {
5197 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5199 if (arg_setup_type == arg_setup_block &&
5200 calling->argc == 1 &&
5201 ISEQ_BODY(iseq)->param.flags.has_lead &&
5202 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5203 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5204 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5207 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5208 if (arg_setup_type == arg_setup_block) {
5209 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5211 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5212 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
5213 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5215 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5216 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5220 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5227 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
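/*
 * Illustration of the arg0-splat handling above: a non-lambda block with more
 * than one required parameter that receives a single Array has that array
 * expanded into the parameters, with missing trailing values filled with nil
 * (proc semantics).  Hypothetical stand-alone analogue:
 */
#if 0
#define MINI_QNIL 0L

static int
mini_splat_arg0(long *argv, int argc, int lead_num, const long *ary, int ary_len)
{
    if (argc == 1 && lead_num > 1) {        /* e.g. |a, b| yielded [1, 2, 3] */
        for (int i = 0; i < lead_num; i++)
            argv[i] = (i < ary_len) ? ary[i] : MINI_QNIL;
        return lead_num;                    /* the new argc */
    }
    return argc;
}
#endif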
5236 calling = &calling_entry;
5237 calling->argc = argc;
5238 calling->block_handler = block_handler;
5239 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5241 calling->heap_argv = 0;
5242 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5244 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5252 bool is_lambda, VALUE block_handler)
5255 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5256 const int arg_size = ISEQ_BODY(iseq)->param.size;
5257 VALUE *const rsp = GET_SP() - calling->argc;
5258 VALUE *const argv = rsp;
5259 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5263 vm_push_frame(ec, iseq,
5264 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
5266 VM_GUARDED_PREV_EP(captured->ep), 0,
5267 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5269 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5277 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5279 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5280 int flags = vm_ci_flag(ci);
5282 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5283 ((calling->argc == 0) ||
5284 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5285 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5286 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5287 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5289 if (UNLIKELY(calling->heap_argv)) {
5290 #if VM_ARGC_STACK_MAX < 0
5298 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5299 reg_cfp->sp[-2] = calling->recv;
5300 flags |= VM_CALL_ARGS_SPLAT;
5303 if (calling->argc < 1) {
5306 calling->recv = TOPN(--calling->argc);
5308 if (calling->kw_splat) {
5309 flags |= VM_CALL_KW_SPLAT;
5313 if (calling->argc < 1) {
5316 calling->recv = TOPN(--calling->argc);
5319 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5325 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5330 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5331 argc = calling->argc;
5332 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5338 vm_proc_to_block_handler(VALUE procval)
5340 const struct rb_block *block = vm_proc_block(procval);
5342 switch (vm_block_type(block)) {
5343 case block_type_iseq:
5344 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5345 case block_type_ifunc:
5346 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5347 case block_type_symbol:
5348 return VM_BH_FROM_SYMBOL(block->as.symbol);
5349 case block_type_proc:
5350 return VM_BH_FROM_PROC(block->as.proc);
5352 VM_UNREACHABLE(vm_yield_with_proc);
5359 bool is_lambda, VALUE block_handler)
5361 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5362 VALUE proc = VM_BH_TO_PROC(block_handler);
5363 is_lambda = block_proc_is_lambda(proc);
5364 block_handler = vm_proc_to_block_handler(proc);
5367 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5373 bool is_lambda, VALUE block_handler)
5377 bool is_lambda, VALUE block_handler);
5379 switch (vm_block_handler_type(block_handler)) {
5380 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
5381 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
5382 case block_handler_type_proc: func = vm_invoke_proc_block; break;
5383 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
5384 default: rb_bug("vm_invoke_block: unreachable");
5387 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5391 vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
5398 rb_bug("vm_make_proc_with_iseq: unreachable");
5401 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5402 captured->code.iseq = blockiseq;
5404 return rb_vm_make_proc(ec, captured, rb_cProc);
5408 vm_once_exec(VALUE iseq)
5415 vm_once_clear(VALUE data)
5418 is->once.running_thread = NULL;
5430 args[0] = obj; args[1] = Qfalse;
5432 if (!UNDEF_P(r) && RTEST(r)) {
5444 enum defined_type type = (enum defined_type)op_type;
5451 return rb_gvar_defined(SYM2ID(obj));
5453 case DEFINED_CVAR: {
5454 const rb_cref_t *cref = vm_get_cref(GET_EP());
5455 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5460 case DEFINED_CONST_FROM: {
5461 bool allow_nil = type == DEFINED_CONST;
5463 return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
5468 return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
5470 case DEFINED_METHOD:{
5475 switch (METHOD_ENTRY_VISI(me)) {
5476 case METHOD_VISI_PRIVATE:
5478 case METHOD_VISI_PROTECTED:
5482 case METHOD_VISI_PUBLIC:
5486 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
5490 return check_respond_to_missing(obj, v);
5495 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5499 case DEFINED_ZSUPER:
5504 VALUE klass = vm_search_normal_superclass(me->defined_class);
5505 if (!klass) return false;
5507 ID id = me->def->original_id;
5514 return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
5516 rb_bug("unimplemented defined? type (VM)");
5526 return vm_defined(ec, reg_cfp, op_type, obj, v);
5529 static const VALUE *
5530 vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5533 const VALUE *ep = reg_ep;
5534 for (i = 0; i < lv; i++) {
5535 ep = GET_PREV_EP(ep);
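/*
 * vm_get_ep above resolves `lv` lexical levels by chasing prev-EP links.
 * A self-contained analogue with an explicit chain; the mini_env type is
 * hypothetical:
 */
#if 0
struct mini_env { struct mini_env *prev; long locals[8]; };

static long *
mini_get_local(struct mini_env *ep, unsigned long lv, int idx)
{
    while (lv--) ep = ep->prev;    /* hop out `lv` enclosing scopes */
    return &ep->locals[idx];
}
#endif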
5541 vm_get_special_object(const VALUE *const reg_ep,
5542 enum vm_special_object_type type)
5545 case VM_SPECIAL_OBJECT_VMCORE:
5546 return rb_mRubyVMFrozenCore;
5547 case VM_SPECIAL_OBJECT_CBASE:
5548 return vm_get_cbase(reg_ep);
5549 case VM_SPECIAL_OBJECT_CONST_BASE:
5550 return vm_get_const_base(reg_ep);
5552 rb_bug("putspecialobject insn: unknown value_type %d", type);
5559 const VALUE ary2 = ary2st;
5560 VALUE tmp1 = rb_check_to_array(ary1);
5561 VALUE tmp2 = rb_check_to_array(ary2);
5581 const VALUE ary2 = ary2st;
5582 VALUE tmp2 = rb_check_to_array(ary2);
5596 return vm_concat_array(ary1, ary2st);
5600 rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
5602 return vm_concat_to_array(ary1, ary2st);
5608 VALUE tmp = rb_check_to_array(ary);
5612 else if (RTEST(flag)) {
5625 return vm_splat_array(flag, ary);
5631 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5633 if (flag & VM_CHECKMATCH_ARRAY) {
5637 for (i = 0; i < n; i++) {
5639 VALUE c = check_match(ec, v, target, type);
5648 return check_match(ec, pattern, target, type);
5655 return vm_check_match(ec, target, pattern, flag);
5659 vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
5661 const VALUE kw_bits = *(ep - bits);
5664 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
5665 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
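/*
 * Worked example for the bit test above: if keyword parameters 0 and 2 were
 * supplied at the call site, kw_bits unboxes to 0b101, so idx 1 fails the
 * test and its default expression is evaluated.
 */
#if 0
static void
mini_kw_bits_example(void)
{
    unsigned int b = 0x5;                          /* 0b101 */
    int kw0_given = (b & (0x01 << 0)) != 0;        /* 1 */
    int kw1_given = (b & (0x01 << 1)) != 0;        /* 0 -> run the default */
    int kw2_given = (b & (0x01 << 2)) != 0;        /* 1 */
    (void)kw0_given; (void)kw1_given; (void)kw2_given;
}
#endif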
5678 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5679 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5680 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5681 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5685 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5688 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5691 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5694 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5701 vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5706 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5707 return rb_public_const_get_at(cbase, id);
5715 vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
5720 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5725 "superclass mismatch for class %"PRIsVALUE
"",
5738 vm_check_if_module(ID id, VALUE mod)
5757 vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5760 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5768 vm_declare_module(ID id, VALUE cbase)
5774 NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
5781 VALUE location = rb_const_source_location_at(cbase, id);
5782 if (!NIL_P(location)) {
5783 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
5784 " previous definition of %"PRIsVALUE" was here",
5791 vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5795 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
5797 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
5801 vm_check_if_namespace(cbase);
5805 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
5806 if (!vm_check_if_class(id, flags, super, klass))
5807 unmatched_redefinition("class", cbase, id, klass);
5811 return vm_declare_class(id, flags, cbase, super);
5816 vm_define_module(ID id, rb_num_t flags, VALUE cbase)
5820 vm_check_if_namespace(cbase);
5821 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
5822 if (!vm_check_if_module(id, mod))
5823 unmatched_redefinition("module", cbase, id, mod);
5827 return vm_declare_module(id, cbase);
5832 vm_find_or_create_class_by_id(ID id,
5837 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
5840 case VM_DEFINECLASS_TYPE_CLASS:
5842 return vm_define_class(id, flags, cbase, super);
5844 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5848 case VM_DEFINECLASS_TYPE_MODULE:
5850 return vm_define_module(id, flags, cbase);
5853 rb_bug("unknown defineclass type: %d", (int)type);
5857 static rb_method_visibility_t
5862 if (!vm_env_cref_by_cref(cfp->ep)) {
5863 return METHOD_VISI_PUBLIC;
5866 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5875 if (!vm_env_cref_by_cref(cfp->ep)) {
5879 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5887 rb_method_visibility_t visi;
5892 visi = METHOD_VISI_PUBLIC;
5895 klass = CREF_CLASS_FOR_DEFINITION(cref);
5896 visi = vm_scope_visibility_get(ec);
5903 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
5907 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
5910 if (!is_singleton && vm_scope_module_func_check(ec)) {
5912 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5922 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5924 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5925 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
5928 return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
5932 enum method_explorer_type {
5934 mexp_search_invokeblock,
5943 VALUE block_handler,
5944 enum method_explorer_type method_explorer
5949 int argc = vm_ci_argc(ci);
5950 VALUE recv = TOPN(argc);
5952 .block_handler = block_handler,
5953 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5959 switch (method_explorer) {
5960 case mexp_search_method:
5961 calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
5962 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5964 case mexp_search_super:
5965 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5966 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5968 case mexp_search_invokeblock:
5969 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5986 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
5987 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);
5989 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
5991 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
5996 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
5997 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6008 VALUE bh = VM_BLOCK_HANDLER_NONE;
6009 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6024 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6025 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);
6027 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6029 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6034 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
6035 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6046 VALUE bh = VM_BLOCK_HANDLER_NONE;
6047 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6084 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
6090 val = rb_mod_to_s(recv);
6096 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
6097 return rb_nil_to_s(recv);
6101 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
6102 return rb_true_to_s(recv);
6106 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
6107 return rb_false_to_s(recv);
6111 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
6112 return rb_fix_to_s(recv);
6120 vm_opt_ary_freeze(VALUE ary, int bop, ID id)
6122 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6131 vm_opt_hash_freeze(VALUE hash, int bop, ID id)
6133 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6142 vm_opt_str_freeze(VALUE str, int bop, ID id)
6144 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6153 #define id_cmp idCmp
6158 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6164 rb_snum_t i = num - 1;
6167 if (OPTIMIZED_CMP(v, result) > 0) {
6182 return vm_opt_newarray_max(ec, num, ptr);
6188 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6194 rb_snum_t i = num - 1;
6197 if (OPTIMIZED_CMP(v, result) < 0) {
6212 return vm_opt_newarray_min(ec, num, ptr);
6219 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6220 return rb_ary_hash_values(num, ptr);
6230 return vm_opt_newarray_hash(ec, num, ptr);
6239 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6241 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6242 return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
6252 if (!UNDEF_P(buffer)) {
6253 args[1] = rb_hash_new_with_size(1);
6259 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
6266 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
6272 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
6278 vm_track_constant_cache(ID id, void *ic)
6280 struct rb_id_table *const_cache = GET_VM()->constant_cache;
6281 VALUE lookup_result;
6284 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
6288 ics = st_init_numtable();
6289 rb_id_table_insert(const_cache, id, (VALUE)ics);
6292 st_insert(ics, (st_data_t) ic, (st_data_t)Qtrue);
6300 for (int i = 0; segments[i]; i++) {
6301 ID id = segments[i];
6302 if (id == idNULL) continue;
6303 vm_track_constant_cache(id, ic);
6313 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6314 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6316 return (ic_cref == NULL ||
6317 ic_cref == vm_get_cref(reg_ep));
6325 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6326 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6331 rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
6333 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
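/*
 * Shape of the inline-cache test above, reduced to its essentials
 * (illustrative types): a hit needs a populated entry whose recorded cref
 * (lexical scope) is either absent or still matches the current one;
 * otherwise the slow path re-resolves the constant and refills the cache.
 */
#if 0
struct mini_ice { const void *ic_cref; unsigned long value; };
struct mini_ic  { const struct mini_ice *entry; };

static int
mini_ic_hit_p(const struct mini_ic *ic, const void *current_cref)
{
    return ic->entry != NULL &&
           (ic->entry->ic_cref == NULL || ic->entry->ic_cref == current_cref);
}
#endif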
6339 if (ruby_vm_const_missing_count > 0) {
6340 ruby_vm_const_missing_count = 0;
6347 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6352 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6353 rb_yjit_constant_ic_update(iseq, ic, pos);
6354 rb_rjit_constant_ic_update(iseq, ic, pos);
6363 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6366 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6369 ruby_vm_constant_cache_misses++;
6370 val = vm_get_ev_const_chain(ec, segments);
6371 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6374 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6386 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6387 return is->once.value;
6389 else if (is->once.running_thread == NULL) {
6391 is->once.running_thread = th;
6395 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6398 else if (is->once.running_thread == th) {
6400 return vm_once_exec((VALUE)iseq);
6404 RUBY_VM_CHECK_INTS(ec);
6411 vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
6413 switch (OBJ_BUILTIN_TYPE(key)) {
6419 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6420 SYMBOL_REDEFINED_OP_FLAG |
6421 INTEGER_REDEFINED_OP_FLAG |
6422 FLOAT_REDEFINED_OP_FLAG |
6423 NIL_REDEFINED_OP_FLAG |
6424 TRUE_REDEFINED_OP_FLAG |
6425 FALSE_REDEFINED_OP_FLAG |
6426 STRING_REDEFINED_OP_FLAG)) {
6430 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6434 if (rb_hash_stlike_lookup(hash, key, &val)) {
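/*
 * The dispatch above is a hash lookup from a literal key to a branch offset,
 * so a `case` over simple literals selects its `when` in O(1) instead of
 * sequential `===` tests.  Toy stand-in for the CDHASH lookup:
 */
#if 0
struct mini_cd_pair { long key; long offset; };

static long
mini_case_dispatch(const struct mini_cd_pair *tbl, int n, long key, long else_offset)
{
    for (int i = 0; i < n; i++)
        if (tbl[i].key == key) return tbl[i].offset;  /* matching `when` */
    return else_offset;                               /* `else` branch   */
}
#endif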
6445 NORETURN(static void
6454 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6455 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6456 static const char stack_consistency_error[] =
6457 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
6458 #if defined RUBY_DEVEL
6464 rb_bug(stack_consistency_error, nsp, nbp);
6471 if (FIXNUM_2_P(recv, obj) &&
6472 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6473 return rb_fix_plus_fix(recv, obj);
6475 else if (FLONUM_2_P(recv, obj) &&
6476 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6484 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6489 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6490 return rb_str_opt_plus(recv, obj);
6494 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
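/*
 * Why the Fixnum fast path above is cheap: with the tag encoding
 * t(n) = 2n + 1, the sum of two tagged values satisfies
 * t(a) + t(b) - 1 = t(a + b), so the addition runs directly on the tagged
 * representation; only overflow needs checking.  Sketch assuming the
 * GCC/Clang overflow builtin:
 */
#if 0
#include <stdbool.h>

/* x and y are tagged Fixnums; returns true on success, false if the result
 * overflows and the caller must fall back to Bignum arithmetic. */
static bool
mini_tagged_fix_plus(long x, long y, long *out)
{
    return !__builtin_add_overflow(x, y - 1, out);
}
#endif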
6505 if (FIXNUM_2_P(recv, obj) &&
6506 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6507 return rb_fix_minus_fix(recv, obj);
6509 else if (FLONUM_2_P(recv, obj) &&
6510 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6518 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6529 if (FIXNUM_2_P(recv, obj) &&
6530 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6531 return rb_fix_mul_fix(recv, obj);
6533 else if (FLONUM_2_P(recv, obj) &&
6534 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6542 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6553 if (FIXNUM_2_P(recv, obj) &&
6554 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6555 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
6557 else if (FLONUM_2_P(recv, obj) &&
6558 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6559 return rb_flo_div_flo(recv, obj);
6566 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6567 return rb_flo_div_flo(recv, obj);
6577 if (FIXNUM_2_P(recv, obj) &&
6578 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6579 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
6581 else if (FLONUM_2_P(recv, obj) &&
6582 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6590 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6601 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6602 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6604 if (!UNDEF_P(val)) {
6605 return RBOOL(!RTEST(val));
6615 if (FIXNUM_2_P(recv, obj) &&
6616 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6619 else if (FLONUM_2_P(recv, obj) &&
6620 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6628 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6640 if (FIXNUM_2_P(recv, obj) &&
6641 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6644 else if (FLONUM_2_P(recv, obj) &&
6645 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6653 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6665 if (FIXNUM_2_P(recv, obj) &&
6666 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6669 else if (FLONUM_2_P(recv, obj) &&
6670 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6678 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6690 if (FIXNUM_2_P(recv, obj) &&
6691 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6694 else if (FLONUM_2_P(recv, obj) &&
6695 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6703 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6720 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6729 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6747 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6758 if (FIXNUM_2_P(recv, obj) &&
6759 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6771 if (FIXNUM_2_P(recv, obj) &&
6772 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6773 return rb_fix_aref(recv, obj);
6778 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6780 return rb_ary_entry_internal(recv, FIX2LONG(obj));
6783 return rb_ary_aref1(recv, obj);
6787 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6802 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6808 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6821 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6822 rb_hash_compare_by_id_p(recv) == Qfalse &&
6823 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6834 return vm_opt_aref_with(recv, key);
6841 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6842 rb_hash_compare_by_id_p(recv) == Qfalse) {
6851 vm_opt_length(VALUE recv, int bop)
6857 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6858 if (bop == BOP_EMPTY_P) {
6866 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6870 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6879 vm_opt_empty_p(VALUE recv)
6881 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6894 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6897 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6913 case RSHIFT(~0UL, 1):
6916 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
6934 vm_opt_succ(VALUE recv)
6937 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
6938 return fix_succ(recv);
6944 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
6955 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
6956 return RBOOL(!RTEST(recv));
6971 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
6975 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
6993 VALUE self = GET_SELF();
6995 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
6997 if (event & global_hooks->events) {
7000 vm_dtrace(event, ec);
7001 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0, val, 0);
7007 if (local_hooks != NULL) {
7008 if (event & local_hooks->events) {
7011 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0, val, 0);
7017 #define VM_TRACE_HOOK(target_event, val) do { \
7018 if ((pc_events & (target_event)) & enabled_flags) { \
7019 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7026 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7027 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7028 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7034 const VALUE *pc = reg_cfp->pc;
7035 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7038 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7044 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7047 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7048 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7052 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7053 enabled_flags |= iseq_local_events;
7055 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7057 if (bmethod_frame) {
7059 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7060 bmethod_local_hooks = me->def->body.bmethod.hooks;
7061 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7062 if (bmethod_local_hooks) {
7063 bmethod_local_events = bmethod_local_hooks->events;
7068 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7072 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7080 else if (ec->trace_arg != NULL) {
7088 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7091 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
7095 (int)rb_iseq_line_no(iseq, pos),
7098 VM_ASSERT(reg_cfp->pc == pc);
7099 VM_ASSERT(pc_events != 0);
7109 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
7110 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
7126 #undef VM_TRACE_HOOK
7128 #if VM_CHECK_MODE > 0
7129 NORETURN( NOINLINE( COLDFUNC
7130 void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
7133 Init_vm_stack_canary(void)
7136 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
7137 vm_stack_canary |= 0x01;
7139 vm_stack_canary_was_born = true;
7144 rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
7148 const char *insn = rb_insns_name(i);
7152 rb_bug("dead canary found at %s: %s", insn, str);
7156 void Init_vm_stack_canary(void) { }
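/*
 * Sketch of the canary protocol used above: Init_vm_stack_canary fills
 * vm_stack_canary with random bits (forcing the low bit so it can never look
 * like a valid VALUE), SETUP_CANARY plants it in a stack slot a leaf
 * instruction must not touch, and CHECK_CANARY reports through
 * rb_vm_canary_is_found_dead if the slot changed.  Stand-alone analogue:
 */
#if 0
#include <assert.h>

static unsigned long mini_canary = 0xdeadbeefUL | 0x01;  /* would be random */

static void mini_leaf_op(void) { /* must not write the guarded slot */ }

static void
mini_run_checked(void)
{
    volatile unsigned long slot = mini_canary;  /* plant  */
    mini_leaf_op();
    assert(slot == mini_canary);                /* verify */
}
#endif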
7188 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7195 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7202 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7209 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7216 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7223 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7230 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7237 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7244 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7250 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
7251 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7257 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
7258 return (*(rb_invoke_funcptr10_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7264 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
7265 return (*(rb_invoke_funcptr11_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7271 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
7272 return (*(rb_invoke_funcptr12_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7278 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
7279 return (*(rb_invoke_funcptr13_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7285 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
7286 return (*(rb_invoke_funcptr14_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7292 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
7293 return (*(rb_invoke_funcptr15_t)funcptr)(ec,
self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);

static builtin_invoker
lookup_builtin_invoker(int argc)
{
    static const builtin_invoker invokers[] = {
        builtin_invoker0,  builtin_invoker1,  builtin_invoker2,  builtin_invoker3,
        builtin_invoker4,  builtin_invoker5,  builtin_invoker6,  builtin_invoker7,
        builtin_invoker8,  builtin_invoker9,  builtin_invoker10, builtin_invoker11,
        builtin_invoker12, builtin_invoker13, builtin_invoker14, builtin_invoker15,
    };
    return invokers[argc];
}
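/*
 * Dispatch sketch (illustrative values): a builtin registered with
 * argc == 2 reaches its C implementation via
 *
 *   builtin_invoker invoker = lookup_builtin_invoker(2);  // builtin_invoker2
 *   VALUE ret = invoker(ec, self, argv, funcptr);
 *
 * invokers[] is indexed without a bounds check, presumably because argc
 * comes from the static builtin table and is trusted to be in 0..15.
 */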
static VALUE
invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function *bf, const VALUE *argv)
{
    /* only leaf builtins get the canary check; verifies the assumption
     * behind `Primitive.attr! :leaf` */
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;
}

VALUE
rb_vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, const VALUE *argv)
{
    return invoke_bf(ec, cfp, bf, argv);
}
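/*
 * Illustration (my_builtin is hypothetical, not part of this file): a
 * one-argument builtin has the C shape
 *
 *   static VALUE my_builtin(rb_execution_context_t *ec, VALUE self, VALUE v1);
 *
 * and invoke_bf() reaches it as
 *
 *   (*(rb_invoke_funcptr1_t)bf->func_ptr)(ec, reg_cfp->self, argv[0]);
 *
 * which is exactly what builtin_invoker1() does after
 * lookup_builtin_invoker(1).
 */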
VALUE
rb_vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
{
    if (0) { /* debug print */
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i=0; i<bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
                          (void *)(uintptr_t)bf->func_ptr);
    }

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    }
    else {
        /* pass the frame's own locals straight through as argv */
        const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
        return invoke_bf(ec, cfp, bf, argv);
    }
}
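/*
 * Worked example of the argv computation above (illustrative numbers):
 * with local_table_size == 4, VM_ENV_DATA_SIZE == 3 and start_index == 1,
 *
 *   argv = ep - 4 - 3 + 1 + 1 = ep - 5
 *
 * so argv[0] is the frame's second local.  Locals occupy the slots
 * ep[-(local_table_size + VM_ENV_DATA_SIZE - 1)] .. ep[-VM_ENV_DATA_SIZE],
 * and the builtin reads the caller's locals in place, with no copying into
 * a separate argument buffer.
 */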
/* expose a local variable slot of the current frame to C callers,
 * reading it ep-relative (signature as in the upstream source) */
VALUE
rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
{
    const rb_control_frame_t *cfp = ec->cfp;
    return cfp->ep[index];
}