11 #include "ruby/internal/config.h"
15 #ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20 #include "debug_counter.h"
22 #include "internal/class.h"
23 #include "internal/compar.h"
24 #include "internal/hash.h"
25 #include "internal/numeric.h"
26 #include "internal/proc.h"
27 #include "internal/random.h"
28 #include "internal/variable.h"
29 #include "internal/struct.h"
34 #include "insns_info.inc"
40 int argc,
const VALUE *argv,
int priv);
50 ruby_vm_special_exception_copy(
VALUE exc)
53 rb_obj_copy_ivar(e, exc);
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
70 EC_JUMP_TAG(ec, TAG_RAISE);
73 NORETURN(
static void vm_stackoverflow(
void));
76 vm_stackoverflow(
void)
78 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
93 #ifdef USE_SIGALTSTACK
94 ec_stack_overflow(ec, TRUE);
96 ec_stack_overflow(ec, FALSE);
102 #if VM_CHECK_MODE > 0
104 callable_class_p(
VALUE klass)
106 #if VM_CHECK_MODE >= 2
107 if (!klass)
return FALSE;
135 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
137 if (callable_class_p(cme->defined_class)) {
147 vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
149 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
150 enum imemo_type cref_or_me_type = imemo_env;
153 cref_or_me_type = imemo_type(cref_or_me);
155 if (
type & VM_FRAME_FLAG_BMETHOD) {
159 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
160 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
162 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
163 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
167 if (cref_or_me_type != imemo_ment) {
168 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
172 if (req_cref && cref_or_me_type != imemo_cref) {
173 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
176 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
177 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
181 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
187 if (cref_or_me_type == imemo_ment) {
190 if (!callable_method_entry_p(me)) {
191 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
195 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
196 VM_ASSERT(iseq == NULL ||
198 RUBY_VM_NORMAL_ISEQ_P(iseq)
202 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
212 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
215 #define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
217 vm_check_frame_detail(type, req_block, req_me, req_cref, \
218 specval, cref_or_me, is_cframe, iseq); \
220 switch (given_magic) {
222 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
224 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
226 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
227 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
230 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
232 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
237 static VALUE vm_stack_canary;
238 static bool vm_stack_canary_was_born =
false;
245 unsigned int pos = 0;
246 while (pos < ISEQ_BODY(iseq)->iseq_size) {
247 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
248 unsigned int next_pos = pos + insn_len(opcode);
249 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
254 rb_bug(
"failed to find the previous insn");
263 if (! LIKELY(vm_stack_canary_was_born)) {
266 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
270 else if (! (iseq = GET_ISEQ())) {
273 else if (LIKELY(sp[0] != vm_stack_canary)) {
282 const VALUE *orig = rb_iseq_original_iseq(iseq);
283 const VALUE iseqw = rb_iseqw_new(iseq);
285 const char *stri = rb_str_to_cstr(inspection);
286 const VALUE disasm = rb_iseq_disasm(iseq);
287 const char *strd = rb_str_to_cstr(disasm);
288 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
289 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
290 const char *name = insn_name(insn);
296 "We are killing the stack canary set by %s, "
297 "at %s@pc=%"PRIdPTR
"\n"
298 "watch out the C stack trace.\n"
300 name, stri, pos, strd);
303 #define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
306 #define vm_check_canary(ec, sp)
307 #define vm_check_frame(a, b, c, d)
310 #if USE_DEBUG_COUNTER
312 vm_push_frame_debug_counter_inc(
319 RB_DEBUG_COUNTER_INC(frame_push);
321 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
322 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
323 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
326 RB_DEBUG_COUNTER_INC(frame_R2R);
329 RB_DEBUG_COUNTER_INC(frame_R2C);
334 RB_DEBUG_COUNTER_INC(frame_C2R);
337 RB_DEBUG_COUNTER_INC(frame_C2C);
342 switch (
type & VM_FRAME_MAGIC_MASK) {
343 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
344 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
345 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
346 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
347 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
348 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
349 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
350 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
351 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
357 #define vm_push_frame_debug_counter_inc(ec, cfp, t)
362 rb_vm_stack_canary(
void)
364 #if VM_CHECK_MODE > 0
365 return vm_stack_canary;
371 STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
372 STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
373 STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
389 vm_check_frame(
type, specval, cref_or_me, iseq);
390 VM_ASSERT(local_size >= 0);
393 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
394 vm_check_canary(ec, sp);
399 for (
int i=0; i < local_size; i++) {
416 #if VM_DEBUG_BP_CHECK
426 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
427 atomic_signal_fence(memory_order_seq_cst);
435 vm_push_frame_debug_counter_inc(ec, cfp,
type);
443 if (VMDEBUG == 2) SDR();
445 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
452 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
454 if (VMDEBUG == 2) SDR();
456 RUBY_VM_CHECK_INTS(ec);
457 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
459 return flags & VM_FRAME_FLAG_FINISH;
465 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
472 VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
474 rb_imemo_tmpbuf_set_ptr(tmpbuf,
ptr);
478 dmy_iseq->body = dmy_body;
479 dmy_body->type = ISEQ_TYPE_TOP;
480 dmy_body->location.pathobj = fname;
484 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
486 VM_BLOCK_HANDLER_NONE,
498 rb_arity_error_new(
int argc,
int min,
int max)
500 VALUE err_mess =
rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
515 rb_error_arity(
int argc,
int min,
int max)
522 NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
525 vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
528 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
529 VM_FORCE_WRITE(&ep[index], v);
530 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
531 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
536 vm_env_write(
const VALUE *ep,
int index,
VALUE v)
538 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
539 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
540 VM_STACK_ENV_WRITE(ep, index, v);
543 vm_env_write_slowpath(ep, index, v);
548 rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
550 vm_env_write(ep, index, v);
556 if (block_handler == VM_BLOCK_HANDLER_NONE) {
560 switch (vm_block_handler_type(block_handler)) {
561 case block_handler_type_iseq:
562 case block_handler_type_ifunc:
563 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
564 case block_handler_type_symbol:
565 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
566 case block_handler_type_proc:
567 return VM_BH_TO_PROC(block_handler);
569 VM_UNREACHABLE(rb_vm_bh_to_procval);
576 #if VM_CHECK_MODE > 0
578 vm_svar_valid_p(
VALUE svar)
581 switch (imemo_type(svar)) {
590 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
600 if (lep && (ec == NULL || ec->root_lep != lep)) {
601 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
604 svar = ec->root_svar;
607 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
615 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
617 if (lep && (ec == NULL || ec->root_lep != lep)) {
618 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
621 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->
self, &ec->root_svar, svar);
628 const struct vm_svar *svar = lep_svar(ec, lep);
633 case VM_SVAR_LASTLINE:
634 return svar->lastline;
635 case VM_SVAR_BACKREF:
636 return svar->backref;
638 const VALUE ary = svar->others;
653 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
664 struct vm_svar *svar = lep_svar(ec, lep);
667 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
671 case VM_SVAR_LASTLINE:
674 case VM_SVAR_BACKREF:
678 VALUE ary = svar->others;
694 val = lep_svar_get(ec, lep, key);
697 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
714 rb_bug(
"unexpected back-ref");
727 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
737 return rb_reg_last_defined(backref);
739 rb_bug(
"unexpected back-ref");
743 nth = (int)(
type >> 1);
750 check_method_entry(
VALUE obj,
int can_be_svar)
752 if (obj ==
Qfalse)
return NULL;
754 #if VM_CHECK_MODE > 0
758 switch (imemo_type(obj)) {
768 #if VM_CHECK_MODE > 0
769 rb_bug(
"check_method_entry: svar should not be there:");
778 const VALUE *ep = cfp->ep;
781 while (!VM_ENV_LOCAL_P(ep)) {
782 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
783 ep = VM_ENV_PREV_EP(ep);
786 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
792 switch (me->def->type) {
793 case VM_METHOD_TYPE_ISEQ:
794 return me->def->body.iseq.
iseqptr;
803 switch (me->def->type) {
804 case VM_METHOD_TYPE_ISEQ:
805 return me->def->body.iseq.
cref;
811 #if VM_CHECK_MODE == 0
815 check_cref(
VALUE obj,
int can_be_svar)
817 if (obj ==
Qfalse)
return NULL;
819 #if VM_CHECK_MODE > 0
823 switch (imemo_type(obj)) {
833 #if VM_CHECK_MODE > 0
834 rb_bug(
"check_method_entry: svar should not be there:");
841 vm_env_cref(
const VALUE *ep)
845 while (!VM_ENV_LOCAL_P(ep)) {
846 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
847 ep = VM_ENV_PREV_EP(ep);
850 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
854 is_cref(
const VALUE v,
int can_be_svar)
857 switch (imemo_type(v)) {
870 vm_env_cref_by_cref(
const VALUE *ep)
872 while (!VM_ENV_LOCAL_P(ep)) {
873 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
874 ep = VM_ENV_PREV_EP(ep);
876 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
880 cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
882 const VALUE v = *vptr;
886 switch (imemo_type(v)) {
889 new_cref = vm_cref_dup(cref);
894 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
899 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
903 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
912 vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
914 if (vm_env_cref_by_cref(ep)) {
918 while (!VM_ENV_LOCAL_P(ep)) {
919 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
920 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
923 ep = VM_ENV_PREV_EP(ep);
925 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
926 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
929 rb_bug(
"vm_cref_dup: unreachable");
934 vm_get_cref(
const VALUE *ep)
942 rb_bug(
"vm_get_cref: unreachable");
947 rb_vm_get_cref(
const VALUE *ep)
949 return vm_get_cref(ep);
960 return vm_get_cref(cfp->ep);
964 vm_get_const_key_cref(
const VALUE *ep)
970 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
971 RCLASS_EXT(CREF_CLASS(cref))->cloned) {
974 cref = CREF_NEXT(cref);
987 if (CREF_CLASS(cref) == old_klass) {
988 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
989 *new_cref_ptr = new_cref;
992 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
993 cref = CREF_NEXT(cref);
994 *new_cref_ptr = new_cref;
995 new_cref_ptr = &new_cref->next;
997 *new_cref_ptr = NULL;
1006 prev_cref = vm_env_cref(ep);
1012 prev_cref = vm_env_cref(cfp->ep);
1016 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1020 vm_get_cbase(
const VALUE *ep)
1022 const rb_cref_t *cref = vm_get_cref(ep);
1024 return CREF_CLASS_FOR_DEFINITION(cref);
1028 vm_get_const_base(
const VALUE *ep)
1030 const rb_cref_t *cref = vm_get_cref(ep);
1033 if (!CREF_PUSHED_BY_EVAL(cref)) {
1034 return CREF_CLASS_FOR_DEFINITION(cref);
1036 cref = CREF_NEXT(cref);
1043 vm_check_if_namespace(
VALUE klass)
1051 vm_ensure_not_refinement_module(
VALUE self)
1054 rb_warn(
"not defined at the refinement, but at the outer class/module");
1070 if (
NIL_P(orig_klass) && allow_nil) {
1072 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1076 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1077 root_cref = CREF_NEXT(root_cref);
1080 while (cref && CREF_NEXT(cref)) {
1081 if (CREF_PUSHED_BY_EVAL(cref)) {
1085 klass = CREF_CLASS(cref);
1087 cref = CREF_NEXT(cref);
1089 if (!
NIL_P(klass)) {
1093 if ((ce = rb_const_lookup(klass,
id))) {
1094 rb_const_warn_if_deprecated(ce, klass,
id);
1097 if (am == klass)
break;
1099 if (is_defined)
return 1;
1100 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1102 goto search_continue;
1109 if (UNLIKELY(!rb_ractor_main_p())) {
1112 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass),
rb_id2name(
id));
1123 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1124 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1138 vm_check_if_namespace(orig_klass);
1140 return rb_public_const_defined_from(orig_klass,
id);
1143 return rb_public_const_get_from(orig_klass,
id);
1151 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1159 int allow_nil = TRUE;
1160 if (segments[0] == idNULL) {
1165 while (segments[idx]) {
1166 ID id = segments[idx++];
1167 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1180 rb_bug(
"vm_get_cvar_base: no cref");
1183 while (CREF_NEXT(cref) &&
1184 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1185 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1186 cref = CREF_NEXT(cref);
1188 if (top_level_raise && !CREF_NEXT(cref)) {
1192 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1200 ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1202 fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1205 vm_cc_attr_index_set(cc, index, shape_id);
1208 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1212 #define ractor_incidental_shareable_p(cond, val) \
1213 (!(cond) || rb_ractor_shareable_p(val))
1214 #define ractor_object_incidental_shareable_p(obj, val) \
1215 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1217 #define ATTR_INDEX_NOT_SET (attr_index_t)-1
1225 shape_id_t shape_id;
1229 return default_value;
1232 #if SHAPE_IN_BASIC_FLAGS
1233 shape_id = RBASIC_SHAPE_ID(obj);
1241 #if !SHAPE_IN_BASIC_FLAGS
1242 shape_id = ROBJECT_SHAPE_ID(obj);
1248 if (UNLIKELY(!rb_ractor_main_p())) {
1256 if (default_value ==
Qundef) {
1264 ivar_list = RCLASS_IVPTR(obj);
1266 #if !SHAPE_IN_BASIC_FLAGS
1267 shape_id = RCLASS_SHAPE_ID(obj);
1275 rb_gen_ivtbl_get(obj,
id, &ivtbl);
1276 #if !SHAPE_IN_BASIC_FLAGS
1277 shape_id = ivtbl->shape_id;
1279 ivar_list = ivtbl->as.shape.ivptr;
1282 return default_value;
1286 shape_id_t cached_id;
1290 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1293 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1296 if (LIKELY(cached_id == shape_id)) {
1297 RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1299 if (index == ATTR_INDEX_NOT_SET) {
1300 return default_value;
1303 val = ivar_list[index];
1304 #if USE_DEBUG_COUNTER
1305 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1308 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1314 #if USE_DEBUG_COUNTER
1316 if (cached_id != INVALID_SHAPE_ID) {
1317 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1320 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1324 if (cached_id != INVALID_SHAPE_ID) {
1325 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1328 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1331 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1334 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1338 if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
1343 table = (
st_table *)RCLASS_IVPTR(obj);
1347 table = ROBJECT_IV_HASH(obj);
1352 if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
1353 table = ivtbl->as.complex.table;
1359 if (!table || !st_lookup(table,
id, &val)) {
1360 val = default_value;
1364 shape_id_t previous_cached_id = cached_id;
1365 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1368 if (cached_id != previous_cached_id) {
1369 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1372 if (index == ATTR_INDEX_NOT_SET) {
1373 val = default_value;
1377 val = ivar_list[index];
1383 vm_cc_attr_index_initialize(cc, shape_id);
1386 vm_ic_attr_index_initialize(ic, shape_id);
1389 val = default_value;
1395 if (!UNDEF_P(default_value)) {
1403 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1414 populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1416 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1420 vm_cc_attr_index_set(cc, index, next_shape_id);
1423 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1435 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1438 rb_check_frozen(obj);
1440 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1442 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1444 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1445 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1448 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1458 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1464 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1467 NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1469 vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1471 #if SHAPE_IN_BASIC_FLAGS
1472 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1474 shape_id_t shape_id = rb_generic_shape_id(obj);
1480 if (shape_id == dest_shape_id) {
1481 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1483 else if (dest_shape_id != INVALID_SHAPE_ID) {
1484 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1485 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1487 if (shape_id == dest_shape->parent_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1498 rb_gen_ivtbl_get(obj, 0, &ivtbl);
1500 if (shape_id != dest_shape_id) {
1501 #if SHAPE_IN_BASIC_FLAGS
1502 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1504 ivtbl->shape_id = dest_shape_id;
1510 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1516 vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1524 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1525 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1527 if (LIKELY(shape_id == dest_shape_id)) {
1528 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1531 else if (dest_shape_id != INVALID_SHAPE_ID) {
1532 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1533 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1534 shape_id_t source_shape_id = dest_shape->parent_id;
1536 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1537 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1539 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1541 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id),
id) == dest_shape);
1557 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1558 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1564 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1576 VALUE defined_class = 0;
1580 defined_class =
RBASIC(defined_class)->klass;
1583 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1585 rb_bug(
"the cvc table should be set");
1589 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1590 rb_bug(
"should have cvar cache entry");
1595 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1611 cref = vm_get_cref(GET_EP());
1613 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1614 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1616 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1622 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1624 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1630 return vm_getclassvariable(iseq, cfp,
id, ic);
1637 cref = vm_get_cref(GET_EP());
1639 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1640 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1642 rb_class_ivar_set(ic->entry->class_value,
id, val);
1646 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1650 update_classvariable_cache(iseq, klass,
id, cref, ic);
1656 vm_setclassvariable(iseq, cfp,
id, val, ic);
1662 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1673 shape_id_t dest_shape_id;
1675 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1677 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1684 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1688 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1695 vm_setinstancevariable(iseq, obj,
id, val, ic);
1704 ec->tag->state = RUBY_TAG_FATAL;
1707 ec->tag->state = TAG_THROW;
1709 else if (THROW_DATA_P(err)) {
1710 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1713 ec->tag->state = TAG_RAISE;
1720 const int flag,
const VALUE throwobj)
1728 else if (state == TAG_BREAK) {
1730 const VALUE *ep = GET_EP();
1731 const rb_iseq_t *base_iseq = GET_ISEQ();
1732 escape_cfp = reg_cfp;
1734 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1735 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1736 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1737 ep = escape_cfp->ep;
1738 base_iseq = escape_cfp->iseq;
1741 ep = VM_ENV_PREV_EP(ep);
1742 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1743 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1744 VM_ASSERT(escape_cfp->iseq == base_iseq);
1748 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1754 ep = VM_ENV_PREV_EP(ep);
1756 while (escape_cfp < eocfp) {
1757 if (escape_cfp->ep == ep) {
1758 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1759 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1764 for (i=0; i < ct->size; i++) {
1766 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1768 if (entry->type == CATCH_TYPE_BREAK &&
1769 entry->iseq == base_iseq &&
1770 entry->start < epc && entry->end >= epc) {
1771 if (entry->cont == epc) {
1780 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1785 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1788 else if (state == TAG_RETRY) {
1789 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1791 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1793 else if (state == TAG_RETURN) {
1794 const VALUE *current_ep = GET_EP();
1795 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1796 int in_class_frame = 0;
1798 escape_cfp = reg_cfp;
1801 while (!VM_ENV_LOCAL_P(ep)) {
1802 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1805 ep = VM_ENV_PREV_EP(ep);
1809 while (escape_cfp < eocfp) {
1810 const VALUE *lep = VM_CF_LEP(escape_cfp);
1816 if (lep == target_lep &&
1817 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1818 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1823 if (lep == target_lep) {
1824 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1826 if (in_class_frame) {
1831 const VALUE *tep = current_ep;
1833 while (target_lep != tep) {
1834 if (escape_cfp->ep == tep) {
1836 if (tep == target_ep) {
1840 goto unexpected_return;
1843 tep = VM_ENV_PREV_EP(tep);
1847 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1848 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1850 case ISEQ_TYPE_MAIN:
1852 if (in_class_frame)
goto unexpected_return;
1853 if (target_ep == NULL) {
1857 goto unexpected_return;
1861 case ISEQ_TYPE_EVAL: {
1863 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1864 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1865 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1866 t = ISEQ_BODY(is)->type;
1868 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1871 case ISEQ_TYPE_CLASS:
1880 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1881 if (target_ep == NULL) {
1885 goto unexpected_return;
1889 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1892 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1898 rb_bug(
"isns(throw): unsupported throw type");
1901 ec->tag->state = state;
1902 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1907 rb_num_t throw_state,
VALUE throwobj)
1909 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1910 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1913 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1916 return vm_throw_continue(ec, throwobj);
1923 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1929 int is_splat = flag & 0x01;
1932 const VALUE obj = ary;
1944 if (num + is_splat == 0) {
1947 else if (flag & 0x02) {
1952 for (i = 0; i < num -
len; i++) {
1957 for (j = 0; i < num; i++, j++) {
1979 for (; i < num -
len; i++) {
1983 for (rb_num_t j = 0; i < num; i++, j++) {
1984 *cfp->sp++ =
ptr[
len - j - 1];
1988 for (rb_num_t j = 0; j < num; j++) {
1989 *cfp->sp++ =
ptr[num - j - 1];
2005 #if VM_CHECK_MODE > 0
2006 ccs->debug_sig = ~(
VALUE)ccs;
2012 ccs->entries = NULL;
2014 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2022 if (! vm_cc_markable(cc)) {
2026 if (UNLIKELY(ccs->len == ccs->capa)) {
2027 if (ccs->capa == 0) {
2029 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
2033 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2036 VM_ASSERT(ccs->len < ccs->capa);
2038 const int pos = ccs->len++;
2039 ccs->entries[pos].argc = vm_ci_argc(ci);
2040 ccs->entries[pos].flag = vm_ci_flag(ci);
2043 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2049 #if VM_CHECK_MODE > 0
2053 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2054 for (
int i=0; i<ccs->len; i++) {
2055 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2056 ccs->entries[i].flag,
2057 ccs->entries[i].argc);
2058 rp(ccs->entries[i].cc);
2065 VM_ASSERT(vm_ccs_p(ccs));
2066 VM_ASSERT(ccs->len <= ccs->capa);
2068 for (
int i=0; i<ccs->len; i++) {
2071 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2072 VM_ASSERT(vm_cc_class_check(cc, klass));
2073 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2074 VM_ASSERT(!vm_cc_super_p(cc));
2075 VM_ASSERT(!vm_cc_refinement_p(cc));
2086 const ID mid = vm_ci_mid(ci);
2087 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
2094 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2096 const int ccs_len = ccs->len;
2098 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2099 rb_vm_ccs_free(ccs);
2100 rb_id_table_delete(cc_tbl, mid);
2104 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2109 unsigned int argc = vm_ci_argc(ci);
2110 unsigned int flag = vm_ci_flag(ci);
2112 for (
int i=0; i<ccs_len; i++) {
2113 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2114 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2115 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2117 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2119 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2120 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2122 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2123 VM_ASSERT(ccs_cc->klass == klass);
2124 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2133 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2136 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2142 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2144 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2147 cme = rb_callable_method_entry(klass, mid);
2150 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2154 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2155 return &vm_empty_cc;
2158 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2163 VM_ASSERT(cc_tbl != NULL);
2165 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2171 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2175 cme = rb_check_overloaded_cme(cme, ci);
2177 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2178 vm_ccs_push(klass, ccs, ci, cc);
2180 VM_ASSERT(vm_cc_cme(cc) != NULL);
2181 VM_ASSERT(cme->called_id == mid);
2182 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2196 cc = vm_search_cc(klass, ci);
2199 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2200 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2201 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2202 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2203 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2213 #if USE_DEBUG_COUNTER
2217 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2219 #if OPT_INLINE_METHOD_CACHE
2223 if (cd_owner && cc != empty_cc) {
2227 #if USE_DEBUG_COUNTER
2228 if (!old_cc || old_cc == empty_cc) {
2230 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2232 else if (old_cc == cc) {
2233 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2235 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2236 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2238 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2239 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2240 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2243 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2248 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2249 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2260 #if OPT_INLINE_METHOD_CACHE
2261 if (LIKELY(vm_cc_class_check(cc, klass))) {
2262 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2263 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2264 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2265 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2266 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2267 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2271 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2274 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2278 return vm_search_method_slowpath0(cd_owner, cd, klass);
2285 VM_ASSERT(klass !=
Qfalse);
2288 return vm_search_method_fastpath(cd_owner, cd, klass);
2291 #if __has_attribute(transparent_union)
2304 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2305 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2306 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2307 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2308 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2309 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2312 # define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2315 # define make_cfunc_type(f) (cfunc_type)(f)
2325 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2326 VM_ASSERT(callable_method_entry_p(me));
2328 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2332 #if __has_attribute(transparent_union)
2333 return me->def->body.cfunc.func == func.anyargs;
2335 return me->def->body.cfunc.func == func;
2344 VM_ASSERT(iseq != NULL);
2346 return check_cfunc(vm_cc_cme(cc), func);
2349 #define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2350 #define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2352 #define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2384 opt_equality_specialized(
VALUE recv,
VALUE obj)
2386 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2387 goto compare_by_identity;
2389 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2390 goto compare_by_identity;
2393 goto compare_by_identity;
2402 #if MSC_VERSION_BEFORE(1300)
2406 else if (isnan(b)) {
2411 return RBOOL(a == b);
2418 return rb_str_eql_internal(obj, recv);
2423 compare_by_identity:
2424 return RBOOL(recv == obj);
2430 VM_ASSERT(cd_owner != NULL);
2432 VALUE val = opt_equality_specialized(recv, obj);
2433 if (!UNDEF_P(val))
return val;
2435 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2439 return RBOOL(recv == obj);
2443 #undef EQ_UNREDEFINED_P
2446 NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2449 opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2451 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2453 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2454 return RBOOL(recv == obj);
2464 VALUE val = opt_equality_specialized(recv, obj);
2465 if (!UNDEF_P(val)) {
2469 return opt_equality_by_mid_slowpath(recv, obj, mid);
2476 return opt_equality_by_mid(obj1, obj2, idEq);
2482 return opt_equality_by_mid(obj1, obj2, idEqlP);
2492 case VM_CHECKMATCH_TYPE_WHEN:
2494 case VM_CHECKMATCH_TYPE_RESCUE:
2499 case VM_CHECKMATCH_TYPE_CASE: {
2500 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2503 rb_bug(
"check_match: unreachable");
2508 #if MSC_VERSION_BEFORE(1300)
2509 #define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2511 #define CHECK_CMP_NAN(a, b)
2515 double_cmp_lt(
double a,
double b)
2517 CHECK_CMP_NAN(a, b);
2518 return RBOOL(a < b);
2522 double_cmp_le(
double a,
double b)
2524 CHECK_CMP_NAN(a, b);
2525 return RBOOL(a <= b);
2529 double_cmp_gt(
double a,
double b)
2531 CHECK_CMP_NAN(a, b);
2532 return RBOOL(a > b);
2536 double_cmp_ge(
double a,
double b)
2538 CHECK_CMP_NAN(a, b);
2539 return RBOOL(a >= b);
2543 static inline VALUE *
2548 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2549 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2551 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2552 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2553 int params = ISEQ_BODY(cfp->iseq)->param.size;
2556 bp += vm_ci_argc(ci);
2559 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2563 #if VM_DEBUG_BP_CHECK
2564 if (bp != cfp->bp_check) {
2565 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2566 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2567 (
long)(bp - GET_EC()->vm_stack));
2568 rb_bug(
"vm_base_ptr: unreachable");
2581 return vm_base_ptr(cfp);
2586 #include "vm_args.c"
2596 static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2601 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2603 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2609 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2612 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2613 int param = ISEQ_BODY(iseq)->param.size;
2614 int local = ISEQ_BODY(iseq)->local_table_size;
2615 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2621 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2622 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2623 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2624 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2625 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2626 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2627 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2628 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2632 rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2634 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2635 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2636 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2637 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2638 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2639 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2640 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2641 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2645 rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2647 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2648 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2649 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2650 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2651 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2652 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2653 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2656 #define ALLOW_HEAP_ARGV (-2)
2657 #define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2662 vm_check_canary(GET_EC(), cfp->sp);
2668 int argc = calling->argc;
2670 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2674 VALUE *argv = cfp->sp - argc;
2678 cfp->sp -= argc - 1;
2679 cfp->sp[-1] = argv_ary;
2681 calling->heap_argv = argv_ary;
2687 if (max_args >= 0 &&
len + argc > max_args) {
2695 calling->argc +=
len - (max_args - argc + 1);
2696 len = max_args - argc + 1;
2705 calling->heap_argv = 0;
2707 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2709 for (i = 0; i <
len; i++) {
2710 *cfp->sp++ =
ptr[i];
2722 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2723 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2724 const VALUE h = rb_hash_new_with_size(kw_len);
2725 VALUE *sp = cfp->sp;
2728 for (i=0; i<kw_len; i++) {
2733 cfp->sp -= kw_len - 1;
2734 calling->argc -= kw_len - 1;
2735 calling->kw_splat = 1;
2739 vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2742 if (keyword_hash !=
Qnil) {
2744 keyword_hash =
rb_hash_dup(rb_to_hash_type(keyword_hash));
2747 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2755 return keyword_hash;
2761 const struct rb_callinfo *restrict ci,
int max_args)
2763 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2764 if (IS_ARGS_KW_SPLAT(ci)) {
2766 VM_ASSERT(calling->kw_splat == 1);
2770 VALUE ary = cfp->sp[0];
2771 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2774 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2778 if (UNLIKELY(calling->heap_argv)) {
2780 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2781 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2782 calling->kw_splat = 0;
2790 VM_ASSERT(calling->kw_splat == 1);
2794 calling->kw_splat = 0;
2799 VM_ASSERT(calling->kw_splat == 0);
2803 VALUE ary = cfp->sp[0];
2805 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2810 VALUE last_hash, argv_ary;
2811 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2812 if (!IS_ARGS_KEYWORD(ci) &&
2815 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2820 calling->kw_splat = 1;
2826 if (!IS_ARGS_KEYWORD(ci) &&
2827 calling->argc > 0 &&
2829 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2837 calling->kw_splat = 1;
2843 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2845 VM_ASSERT(calling->kw_splat == 1);
2846 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2851 calling->kw_splat = 0;
2857 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2859 VM_ASSERT(calling->kw_splat == 0);
2865 vm_caller_setup_arg_kw(cfp, calling, ci);
2869 #define USE_OPT_HIST 0
2872 #define OPT_HIST_MAX 64
2873 static int opt_hist[OPT_HIST_MAX+1];
2877 opt_hist_show_results_at_exit(
void)
2879 for (
int i=0; i<OPT_HIST_MAX; i++) {
2880 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2890 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2891 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2892 const int opt = calling->argc - lead_num;
2893 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2894 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2895 const int param = ISEQ_BODY(iseq)->param.size;
2896 const int local = ISEQ_BODY(iseq)->local_table_size;
2897 const int delta = opt_num - opt;
2899 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2902 if (opt_pc < OPT_HIST_MAX) {
2906 opt_hist[OPT_HIST_MAX]++;
2910 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2918 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2919 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2920 const int opt = calling->argc - lead_num;
2921 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2923 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2926 if (opt_pc < OPT_HIST_MAX) {
2930 opt_hist[OPT_HIST_MAX]++;
2934 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2939 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2940 VALUE *
const locals);
2947 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2948 int param_size = ISEQ_BODY(iseq)->param.size;
2949 int local_size = ISEQ_BODY(iseq)->local_table_size;
2952 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
2954 local_size = local_size + vm_ci_argc(calling->cd->ci);
2955 param_size = param_size + vm_ci_argc(calling->cd->ci);
2957 cfp->sp[0] = (
VALUE)calling->cd->ci;
2959 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
2969 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2970 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2972 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2973 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2975 const int ci_kw_len = kw_arg->keyword_len;
2976 const VALUE *
const ci_keywords = kw_arg->keywords;
2977 VALUE *argv = cfp->sp - calling->argc;
2978 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2979 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2981 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2982 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2984 int param = ISEQ_BODY(iseq)->param.size;
2985 int local = ISEQ_BODY(iseq)->local_table_size;
2986 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2993 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2996 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2997 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2999 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3000 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3001 VALUE *
const argv = cfp->sp - calling->argc;
3002 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3005 for (i=0; i<kw_param->num; i++) {
3006 klocals[i] = kw_param->default_values[i];
3013 int param = ISEQ_BODY(iseq)->param.size;
3014 int local = ISEQ_BODY(iseq)->local_table_size;
3015 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3025 cfp->sp -= (calling->argc + 1);
3026 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3027 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3036 st_table *dup_check_table = vm->unused_block_warning_table;
3045 .v = (
VALUE)cme->def,
3049 if (!vm->unused_block_warning_strict) {
3050 key = (st_data_t)cme->def->original_id;
3052 if (st_lookup(dup_check_table, key, NULL)) {
3062 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3067 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3068 fprintf(stderr,
"key:%p\n", (
void *)key);
3072 if (st_insert(dup_check_table, key, 1)) {
3077 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3079 if (!
NIL_P(m_loc)) {
3080 rb_warning(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3084 rb_warning(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3091 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3096 VM_ASSERT((vm_ci_argc(ci), 1));
3097 VM_ASSERT(vm_cc_cme(cc) != NULL);
3099 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3100 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3101 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3102 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3105 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3106 if (LIKELY(rb_simple_iseq_p(iseq))) {
3108 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3109 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3111 if (calling->argc != lead_num) {
3112 argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
3116 VM_ASSERT(cc == calling->cc);
3118 if (vm_call_iseq_optimizable_p(ci, cc)) {
3119 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3121 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3122 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3123 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3126 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3131 else if (rb_iseq_only_optparam_p(iseq)) {
3134 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3135 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3137 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3138 const int argc = calling->argc;
3139 const int opt = argc - lead_num;
3141 if (opt < 0 || opt > opt_num) {
3142 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
3145 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3146 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3147 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3148 vm_call_cacheable(ci, cc));
3151 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3152 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3153 vm_call_cacheable(ci, cc));
3157 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3158 for (
int i=argc; i<lead_num + opt_num; i++) {
3161 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3163 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3164 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3165 const int argc = calling->argc;
3166 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3168 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3171 if (argc - kw_arg->keyword_len == lead_num) {
3172 const int ci_kw_len = kw_arg->keyword_len;
3173 const VALUE *
const ci_keywords = kw_arg->keywords;
3175 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3177 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3178 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
3180 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3181 vm_call_cacheable(ci, cc));
3186 else if (argc == lead_num) {
3188 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3189 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
3191 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3193 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3194 vm_call_cacheable(ci, cc));
3220 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3221 bool can_fastpath =
true;
3223 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3225 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3226 ci = vm_ci_new_runtime(
3232 ci = forward_cd->caller_ci;
3234 can_fastpath =
false;
3238 if (!vm_ci_markable(ci)) {
3239 ci = vm_ci_new_runtime(
3244 can_fastpath =
false;
3246 argv[param_size - 1] = (
VALUE)ci;
3247 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3251 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3278 const VALUE * lep = VM_CF_LEP(cfp);
3284 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3289 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3293 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3295 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3296 VALUE * to = cfp->sp - 1;
3300 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3305 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3307 cfp->sp = to + argc;
3326 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3329 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3330 int param_size = ISEQ_BODY(iseq)->param.size;
3331 int local_size = ISEQ_BODY(iseq)->local_table_size;
3333 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3335 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3336 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3342 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3345 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3346 int param_size = ISEQ_BODY(iseq)->param.size;
3347 int local_size = ISEQ_BODY(iseq)->local_table_size;
3349 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3352 local_size = local_size + vm_ci_argc(calling->cd->ci);
3353 param_size = param_size + vm_ci_argc(calling->cd->ci);
3355 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3356 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3361 int opt_pc,
int param_size,
int local_size)
3366 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3367 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3370 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3376 int opt_pc,
int param_size,
int local_size)
3378 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3379 VALUE *argv = cfp->sp - calling->argc;
3380 VALUE *sp = argv + param_size;
3381 cfp->sp = argv - 1 ;
3383 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3384 calling->block_handler, (
VALUE)me,
3385 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3386 local_size - param_size,
3387 ISEQ_BODY(iseq)->stack_max);
3396 VALUE *argv = cfp->sp - calling->argc;
3398 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3399 VALUE *src_argv = argv;
3400 VALUE *sp_orig, *sp;
3401 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3403 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3404 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3405 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3406 dst_captured->code.val = src_captured->code.val;
3407 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3408 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3411 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3415 vm_pop_frame(ec, cfp, cfp->ep);
3418 sp_orig = sp = cfp->sp;
3421 sp[0] = calling->recv;
3425 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3426 *sp++ = src_argv[i];
3429 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3430 calling->recv, calling->block_handler, (
VALUE)me,
3431 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3432 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3433 ISEQ_BODY(iseq)->stack_max);
3441 ractor_unsafe_check(
void)
3443 if (!rb_ractor_main_p()) {
3444 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3451 ractor_unsafe_check();
3459 ractor_unsafe_check();
3461 return (*f)(argc, argv, recv);
3467 ractor_unsafe_check();
3475 ractor_unsafe_check();
3477 return (*f)(recv, argv[0]);
3483 ractor_unsafe_check();
3485 return (*f)(recv, argv[0], argv[1]);
3491 ractor_unsafe_check();
3493 return (*f)(recv, argv[0], argv[1], argv[2]);
3499 ractor_unsafe_check();
3501 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3507 ractor_unsafe_check();
3508 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3509 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3515 ractor_unsafe_check();
3516 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3517 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3523 ractor_unsafe_check();
3524 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3525 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3531 ractor_unsafe_check();
3532 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3533 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3539 ractor_unsafe_check();
3540 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3541 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3547 ractor_unsafe_check();
3548 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3549 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3555 ractor_unsafe_check();
3556 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3557 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3563 ractor_unsafe_check();
3564 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3565 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3571 ractor_unsafe_check();
3572 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3573 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3579 ractor_unsafe_check();
3580 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3581 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3587 ractor_unsafe_check();
3588 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3589 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
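/* The trampolines above call ractor_unsafe_check() before spreading argv.
 * The family below appears to be their Ractor-safe counterpart: identical
 * fixed-arity dispatch, without the check. */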
3603 return (*f)(argc, argv, recv);
3617 return (*f)(recv, argv[0]);
3624 return (*f)(recv, argv[0], argv[1]);
3631 return (*f)(recv, argv[0], argv[1], argv[2]);
3638 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3644 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3645 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3651 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3652 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3658 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3659 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3665 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3666 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3672 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3673 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3679 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3680 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3686 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3687 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3693 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3694 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3700 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3701 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3707 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3708 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3714 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3715 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
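/* vm_cfp_consistent_p: after a cfunc returns, reg_cfp must sit exactly one
 * frame above ec->cfp. A pending RAISED_STACKOVERFLOW flag is consumed here
 * so that an overflow unwinding is not misreported as frame corruption. */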
3721 const int ov_flags = RAISED_STACKOVERFLOW;
3722 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3723 if (rb_ec_raised_p(ec, ov_flags)) {
3724 rb_ec_raised_reset(ec, ov_flags);
3730 #define CHECK_CFP_CONSISTENCY(func) \
3731 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3732 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3738 #if VM_DEBUG_VERIFY_METHOD_CACHE
3739 switch (me->def->type) {
3740 case VM_METHOD_TYPE_CFUNC:
3741 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3743 # define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3745 METHOD_BUG(ATTRSET);
3747 METHOD_BUG(BMETHOD);
3750 METHOD_BUG(OPTIMIZED);
3751 METHOD_BUG(MISSING);
3752 METHOD_BUG(REFINED);
3756 rb_bug("wrong method type: %d", me->def->type);
3759 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3766 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3773 VALUE recv = calling->recv;
3774 VALUE block_handler = calling->block_handler;
3775 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3777 if (UNLIKELY(calling->kw_splat)) {
3778 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3781 VM_ASSERT(reg_cfp == ec->cfp);
3783 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3786 vm_push_frame(ec, NULL, frame_type, recv,
3787 block_handler, (VALUE)me,
3788 0, ec->cfp->sp, 0, 0);
3790 int len = cfunc->argc;
3793 reg_cfp->sp = stack_bottom;
3794 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3796 CHECK_CFP_CONSISTENCY("vm_call_cfunc");
3798 rb_vm_pop_frame(ec);
3800 VM_ASSERT(ec->cfp->sp == stack_bottom);
3802 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3803 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
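/* Presumably a helper for JIT-compiled calls: pushes a bare CFUNC frame for
 * an already-resolved callable method entry (cme) without argument setup or
 * C-call event hooks. */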
3813 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3815 VALUE *sp = ec->cfp->sp;
3816 VALUE recv = *(sp - recv_idx - 1);
3817 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3818 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3819 #if VM_CHECK_MODE > 0
3821 *(GET_EC()->cfp->sp) = Qfalse;
3823 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
3828 rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
3830 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3836 int argc = calling->argc;
3837 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3838 VALUE *argv = &stack_bottom[1];
3840 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3847 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3849 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3851 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3852 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3855 VALUE *stack_bottom = reg_cfp->sp - 2;
3857 VM_ASSERT(calling->argc == 1);
3861 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3864 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3866 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3873 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3876 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3877 return vm_call_cfunc_other(ec, reg_cfp, calling);
3881 calling->kw_splat = 0;
3883 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3884 VALUE *sp = stack_bottom;
3885 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3886 for(i = 0; i < argc; i++) {
3891 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3897 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3898 VALUE argv_ary = reg_cfp->sp[-1];
3902 int argc_offset = 0;
3904 if (UNLIKELY(argc > 0 &&
3906 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3908 return vm_call_cfunc_other(ec, reg_cfp, calling);
3912 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3918 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3919 VALUE keyword_hash = reg_cfp->sp[-1];
3922 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3925 return vm_call_cfunc_other(ec, reg_cfp, calling);
3932 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3934 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3935 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3937 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3938 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3940 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3942 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3943 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3947 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3948 return vm_call_cfunc_other(ec, reg_cfp, calling);
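/* Attribute fast paths: attr_reader is served by vm_getivar through the
 * attr index cached in the call cache (cc); attr_writer likewise writes the
 * ivar directly, falling back to slower paths only on shape mismatch. */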
3955 RB_DEBUG_COUNTER_INC(ccf_ivar);
3957 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
3964 RB_DEBUG_COUNTER_INC(ccf_attrset);
3965 VALUE val = *(cfp->sp - 1);
3967 attr_index_t index = vm_cc_attr_index(cc);
3968 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3969 ID id = vm_cc_cme(cc)->def->body.attr.id;
3970 rb_check_frozen(obj);
3971 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
3980 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
3981 if (!UNDEF_P(res)) {
3986 res = vm_setivar_slowpath_attr(obj, id, val, cc);
3994 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
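/* bmethods (define_method): the call delegates to the Proc stored in
 * cme->def->body.bmethod.proc; the defined_ractor check below guards
 * against invoking a bmethod defined in a different Ractor. */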
4004 VALUE procv = cme->def->body.bmethod.proc;
4007 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4012 GetProcPtr(procv, proc);
4013 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4023 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4027 VALUE procv = cme->def->body.bmethod.proc;
4030 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4035 GetProcPtr(procv, proc);
4036 const struct rb_block *block = &proc->block;
4038 while (vm_block_type(block) == block_type_proc) {
4039 block = vm_proc_block(block->as.proc);
4041 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4044 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4045 VALUE *const argv = cfp->sp - calling->argc;
4046 const int arg_size = ISEQ_BODY(iseq)->param.size;
4049 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4050 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4053 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4058 vm_push_frame(ec, iseq,
4059 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4061 VM_GUARDED_PREV_EP(captured->ep),
4063 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4065 ISEQ_BODY(iseq)->local_table_size - arg_size,
4066 ISEQ_BODY(iseq)->stack_max);
4074 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4078 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4079 if (UNLIKELY(calling->heap_argv)) {
4084 argc = calling->argc;
4087 cfp->sp += - argc - 1;
4090 return vm_call_bmethod_body(ec, calling, argv);
4096 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4100 VALUE procv = cme->def->body.bmethod.proc;
4102 GetProcPtr(procv, proc);
4103 const struct rb_block *block = &proc->block;
4105 while (vm_block_type(block) == block_type_proc) {
4106 block = vm_proc_block(block->as.proc);
4108 if (vm_block_type(block) == block_type_iseq) {
4109 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4110 return vm_call_iseq_bmethod(ec, cfp, calling);
4113 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4114 return vm_call_noniseq_bmethod(ec, cfp, calling);
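/* Alias resolution: aliased_callable_method_entry() complements the original
 * method entry with a defined_class recovered by walking the current class's
 * ancestry (rb_find_defined_class_by_owner), then caches the result. */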
4118 rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
4120 VALUE klass = current_class;
4128 while (RTEST(klass)) {
4130 if (owner == target_owner) {
4136 return current_class;
4145 if (orig_me->defined_class == 0) {
4146 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4147 VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
4148 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4150 if (me->def->reference_count == 1) {
4151 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4155 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4163 VM_ASSERT(callable_method_entry_p(cme));
4170 return aliased_callable_method_entry(me);
4176 calling->cc = &VM_CC_ON_STACK(Qundef,
4179 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4181 return vm_call_method_each_type(ec, cfp, calling);
4184 static enum method_missing_reason
4187 enum method_missing_reason stat = MISSING_NOENTRY;
4188 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4189 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4190 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4200 ASSUME(calling->argc >= 0);
4202 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4203 int argc = calling->argc;
4204 VALUE recv = calling->recv;
4207 flags |= VM_CALL_OPT_SEND;
4209 if (UNLIKELY(! mid)) {
4210 mid = idMethodMissing;
4211 missing_reason = ci_missing_reason(ci);
4212 ec->method_missing_reason = missing_reason;
4215 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4220 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4221 VALUE exc = rb_make_no_method_exception(
4243 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4246 argc = ++calling->argc;
4251 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4252 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4253 VALUE exc = rb_make_no_method_exception(
4266 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4272 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4273 calling->cd = &new_fcd.cd;
4277 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4278 new_fcd.caller_ci = caller_ci;
4281 calling->cc = &VM_CC_ON_STACK(klass,
4283 { .method_missing_reason = missing_reason },
4284 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4286 if (flags & VM_CALL_FCALL) {
4287 return vm_call_method(ec, reg_cfp, calling);
4291 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4293 if (vm_cc_cme(cc) != NULL) {
4294 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4295 case METHOD_VISI_PUBLIC:
4296 return vm_call_method_each_type(ec, reg_cfp, calling);
4297 case METHOD_VISI_PRIVATE:
4298 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4300 case METHOD_VISI_PROTECTED:
4301 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4304 VM_UNREACHABLE(vm_call_method);
4306 return vm_call_method_missing(ec, reg_cfp, calling);
4309 return vm_call_method_nome(ec, reg_cfp, calling);
4319 i = calling->argc - 1;
4321 if (calling->argc == 0) {
4346 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4352 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4354 int flags = VM_CALL_FCALL;
4358 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4359 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4361 flags |= VM_CALL_ARGS_SPLAT;
4362 if (calling->kw_splat) {
4363 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4364 ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4365 calling->kw_splat = 0;
4367 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4370 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4371 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4377 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4378 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4384 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4387 int flags = vm_ci_flag(ci);
4389 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4390 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4391 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4392 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4393 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4394 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4397 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4398 return vm_call_opt_send_simple(ec, reg_cfp, calling);
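/* method_missing: the original method id is prepended to the arguments as a
 * Symbol (argv[0]) and the call is re-issued as idMethodMissing with a fresh
 * call info and call cache built on the machine stack. */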
4403 const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
4405 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4407 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4408 unsigned int argc, flag;
4410 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4411 argc = ++calling->argc;
4414 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4415 vm_check_canary(ec, reg_cfp->sp);
4419 argv[0] = ID2SYM(vm_ci_mid(orig_ci));
4422 ec->method_missing_reason = reason;
4426 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4432 if (!(flag & VM_CALL_FORWARDING)) {
4433 calling->cd = &new_fcd.cd;
4437 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4438 new_fcd.caller_ci = caller_ci;
4442 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
4443 rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
4444 return vm_call_method(ec, reg_cfp, calling);
4450 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4461 return vm_call_method_nome(ec, cfp, calling);
4463 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4464 cme->def->body.refined.orig_me) {
4465 cme = refined_method_callable_without_refinement(cme);
4468 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
4470 return vm_call_method_each_type(ec, cfp, calling);
4474 find_refinement(VALUE refinements, VALUE klass)
4476 if (NIL_P(refinements)) {
4488 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4489 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4492 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4493 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4497 } while (cfp->iseq != local_iseq);
4508 if (orig_me->defined_class == 0) {
4516 VM_ASSERT(callable_method_entry_p(cme));
4518 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4528 ID mid = vm_ci_mid(calling->cd->ci);
4529 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4533 for (; cref; cref = CREF_NEXT(cref)) {
4534 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4535 if (NIL_P(refinement)) continue;
4538 rb_callable_method_entry(refinement, mid);
4541 if (vm_cc_call(cc) == vm_call_super_method) {
4544 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4549 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4550 cme->def != ref_me->def) {
4553 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4562 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4563 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4578 if (calling->cd->cc) {
4579 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4581 return vm_call_method(ec, cfp, calling);
4584 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
4585 calling->cc = ref_cc;
4586 return vm_call_method(ec, cfp, calling);
4590 return vm_call_method_nome(ec, cfp, calling);
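/* Optimized Proc dispatch: opt_call shifts the receiver out of the argument
 * window (the MEMMOVE below) and re-enters block invocation directly;
 * opt_block_call takes the same shortcut only while BOP_CALL is unredefined. */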
4596 NOINLINE(static VALUE
4604 int argc = calling->argc;
4607 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
4610 return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
4616 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4619 VALUE procval = calling->recv;
4620 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4626 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4628 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4631 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4632 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4635 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4636 calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
4637 return vm_call_general(ec, reg_cfp, calling);
4644 VALUE recv = calling->recv;
4647 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4648 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4650 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4651 return internal_RSTRUCT_GET(recv, off);
4657 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4659 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4667 VALUE recv = calling->recv;
4670 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4671 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4673 rb_check_frozen(recv);
4675 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4676 internal_RSTRUCT_SET(recv, off, val);
4684 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4686 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4694 #define VM_CALL_METHOD_ATTR(var, func, nohook) \
4695 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4696 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4697 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4699 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4700 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4711 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4712 case OPTIMIZED_METHOD_TYPE_SEND:
4713 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4714 return vm_call_opt_send(ec, cfp, calling);
4715 case OPTIMIZED_METHOD_TYPE_CALL:
4716 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4717 return vm_call_opt_call(ec, cfp, calling);
4718 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4719 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4720 return vm_call_opt_block_call(ec, cfp, calling);
4721 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4722 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4726 VM_CALL_METHOD_ATTR(v,
4727 vm_call_opt_struct_aref(ec, cfp, calling),
4728 set_vm_cc_ivar(cc); \
4729 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4732 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4733 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4737 VM_CALL_METHOD_ATTR(v,
4738 vm_call_opt_struct_aset(ec, cfp, calling),
4739 set_vm_cc_ivar(cc); \
4740 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4744 rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4756 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4758 switch (cme->def->type) {
4759 case VM_METHOD_TYPE_ISEQ:
4760 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4761 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4762 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4765 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4766 return vm_call_iseq_setup(ec, cfp, calling);
4769 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4770 case VM_METHOD_TYPE_CFUNC:
4771 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4772 return vm_call_cfunc(ec, cfp, calling);
4774 case VM_METHOD_TYPE_ATTRSET:
4775 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4779 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4781 if (vm_cc_markable(cc)) {
4782 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4783 VM_CALL_METHOD_ATTR(v,
4784 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4785 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4791 VM_CALLCACHE_UNMARKABLE |
4792 VM_CALLCACHE_ON_STACK,
4798 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
4803 VM_CALL_METHOD_ATTR(v,
4804 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4805 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4809 case VM_METHOD_TYPE_IVAR:
4810 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4812 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4813 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4814 VM_CALL_METHOD_ATTR(v,
4815 vm_call_ivar(ec, cfp, calling),
4816 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4819 case VM_METHOD_TYPE_MISSING:
4820 vm_cc_method_missing_reason_set(cc, 0);
4821 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4822 return vm_call_method_missing(ec, cfp, calling);
4824 case VM_METHOD_TYPE_BMETHOD:
4825 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4826 return vm_call_bmethod(ec, cfp, calling);
4828 case VM_METHOD_TYPE_ALIAS:
4829 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4830 return vm_call_alias(ec, cfp, calling);
4832 case VM_METHOD_TYPE_OPTIMIZED:
4833 return vm_call_optimized(ec, cfp, calling, ci, cc);
4835 case VM_METHOD_TYPE_UNDEF:
4838 case VM_METHOD_TYPE_ZSUPER:
4839 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4841 case VM_METHOD_TYPE_REFINED:
4844 return vm_call_refined(ec, cfp, calling);
4847 rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4857 const int stat = ci_missing_reason(ci);
4859 if (vm_ci_mid(ci) == idMethodMissing) {
4860 if (UNLIKELY(calling->heap_argv)) {
4865 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4866 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4870 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4882 VALUE defined_class = me->defined_class;
4883 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4884 return NIL_P(refined_class) ? defined_class : refined_class;
4893 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4895 if (vm_cc_cme(cc) != NULL) {
4896 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4897 case METHOD_VISI_PUBLIC:
4898 return vm_call_method_each_type(ec, cfp, calling);
4900 case METHOD_VISI_PRIVATE:
4901 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4902 enum method_missing_reason stat = MISSING_PRIVATE;
4903 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4905 vm_cc_method_missing_reason_set(cc, stat);
4906 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4907 return vm_call_method_missing(ec, cfp, calling);
4909 return vm_call_method_each_type(ec, cfp, calling);
4911 case METHOD_VISI_PROTECTED:
4912 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4913 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4915 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4916 return vm_call_method_missing(ec, cfp, calling);
4920 VM_ASSERT(vm_cc_cme(cc) != NULL);
4923 calling->cc = &cc_on_stack;
4924 return vm_call_method_each_type(ec, cfp, calling);
4927 return vm_call_method_each_type(ec, cfp, calling);
4934 return vm_call_method_nome(ec, cfp, calling);
4941 RB_DEBUG_COUNTER_INC(ccf_general);
4942 return vm_call_method(ec, reg_cfp, calling);
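/* super dispatch: vm_call_super_method (below) revalidates the cached cc;
 * vm_search_super_method resolves the superclass of the defined class and
 * routes lookup misses to method_missing via a super-specific call cache. */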
4948 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4949 VM_ASSERT(cc != vm_cc_empty());
4951 *(vm_call_handler *)&cc->call_ = vm_call_general;
4957 RB_DEBUG_COUNTER_INC(ccf_super_method);
4962 if (ec == NULL) rb_bug("unreachable");
4965 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4966 return vm_call_method(ec, reg_cfp, calling);
4972 vm_search_normal_superclass(VALUE klass)
4977 klass = RBASIC(klass)->klass;
4979 klass = RCLASS_ORIGIN(klass);
4983 NORETURN(static void vm_super_outside(void));
4986 vm_super_outside(void)
4992 empty_cc_for_super(void)
4994 return &vm_empty_cc_for_super;
5000 VALUE current_defined_class;
5007 current_defined_class = vm_defined_class_for_protected_call(me);
5010 reg_cfp->iseq != method_entry_iseqptr(me) &&
5013 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5017 "self has wrong type to call super in this context: "
5018 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5023 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5025 "implicit argument passing of super from method defined"
5026 " by define_method() is not supported."
5027 " Specify all arguments explicitly.");
5030 ID mid = me->def->original_id;
5032 if (!vm_ci_markable(cd->ci)) {
5033 VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
5037 cd->ci = vm_ci_new_runtime(mid,
5040 vm_ci_kwarg(cd->ci));
5047 VALUE klass = vm_search_normal_superclass(me->defined_class);
5051 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5055 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
5059 if (cached_cme == NULL) {
5061 cd->cc = empty_cc_for_super();
5063 else if (cached_cme->called_id != mid) {
5066 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5070 cd->cc = cc = empty_cc_for_super();
5074 switch (cached_cme->def->type) {
5076 case VM_METHOD_TYPE_REFINED:
5078 case VM_METHOD_TYPE_ATTRSET:
5079 case VM_METHOD_TYPE_IVAR:
5080 vm_cc_call_set(cc, vm_call_super_method);
5088 VM_ASSERT((vm_cc_cme(cc), true));
5096 block_proc_is_lambda(const VALUE procval)
5101 GetProcPtr(procval, proc);
5102 return proc->is_lambda;
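/* Block invocation: vm_yield_with_cfunc pushes an IFUNC frame and calls the C
 * block function directly; symbol block handlers go through rb_sym_proc_call,
 * and proc handlers are unwrapped to their underlying iseq/ifunc block first. */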
5112 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
5115 int is_lambda = FALSE;
5116 VALUE val, arg, blockarg;
5118 const struct vm_ifunc *ifunc = captured->code.ifunc;
5123 else if (argc == 0) {
5130 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5132 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5134 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5137 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
5140 VM_GUARDED_PREV_EP(captured->ep),
5142 0, ec->cfp->sp, 0, 0);
5143 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
5144 rb_vm_pop_frame(ec);
5152 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5158 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5167 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5169 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5177 vm_callee_setup_block_arg_arg0_check(VALUE *argv)
5179 VALUE ary, arg0 = argv[0];
5184 VM_ASSERT(argv[0] == arg0);
5192 if (rb_simple_iseq_p(iseq)) {
5196 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5198 if (arg_setup_type == arg_setup_block &&
5199 calling->argc == 1 &&
5200 ISEQ_BODY(iseq)->param.flags.has_lead &&
5201 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5202 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5203 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5206 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5207 if (arg_setup_type == arg_setup_block) {
5208 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5210 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5211 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
5212 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5214 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5215 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5219 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5226 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5235 calling = &calling_entry;
5236 calling->argc = argc;
5237 calling->block_handler = block_handler;
5238 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5240 calling->heap_argv = 0;
5241 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5243 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5251 bool is_lambda, VALUE block_handler)
5254 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5255 const int arg_size = ISEQ_BODY(iseq)->param.size;
5256 VALUE *const rsp = GET_SP() - calling->argc;
5257 VALUE *const argv = rsp;
5258 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5262 vm_push_frame(ec, iseq,
5263 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
5265 VM_GUARDED_PREV_EP(captured->ep), 0,
5266 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5268 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5276 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5278 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5279 int flags = vm_ci_flag(ci);
5281 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5282 ((calling->argc == 0) ||
5283 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5284 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5285 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5286 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5288 if (UNLIKELY(calling->heap_argv)) {
5289 #if VM_ARGC_STACK_MAX < 0
5297 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5298 reg_cfp->sp[-2] = calling->recv;
5299 flags |= VM_CALL_ARGS_SPLAT;
5302 if (calling->argc < 1) {
5305 calling->recv = TOPN(--calling->argc);
5307 if (calling->kw_splat) {
5308 flags |= VM_CALL_KW_SPLAT;
5312 if (calling->argc < 1) {
5315 calling->recv = TOPN(--calling->argc);
5318 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5324 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5329 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5330 argc = calling->argc;
5331 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5337 vm_proc_to_block_handler(VALUE procval)
5339 const struct rb_block *block = vm_proc_block(procval);
5341 switch (vm_block_type(block)) {
5342 case block_type_iseq:
5343 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5344 case block_type_ifunc:
5345 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5346 case block_type_symbol:
5347 return VM_BH_FROM_SYMBOL(block->as.symbol);
5348 case block_type_proc:
5349 return VM_BH_FROM_PROC(block->as.proc);
5351 VM_UNREACHABLE(vm_yield_with_proc);
5358 bool is_lambda, VALUE block_handler)
5360 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5361 VALUE proc = VM_BH_TO_PROC(block_handler);
5362 is_lambda = block_proc_is_lambda(proc);
5363 block_handler = vm_proc_to_block_handler(proc);
5366 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5372 bool is_lambda, VALUE block_handler)
5376 bool is_lambda, VALUE block_handler);
5378 switch (vm_block_handler_type(block_handler)) {
5379 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
5380 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
5381 case block_handler_type_proc: func = vm_invoke_proc_block; break;
5382 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
5383 default: rb_bug("vm_invoke_block: unreachable");
5386 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5390 vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
5397 rb_bug("vm_make_proc_with_iseq: unreachable");
5400 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5401 captured->code.iseq = blockiseq;
5403 return rb_vm_make_proc(ec, captured, rb_cProc);
5407 vm_once_exec(VALUE iseq)
5414 vm_once_clear(VALUE data)
5417 is->once.running_thread = NULL;
5429 args[0] = obj; args[1] = Qfalse;
5431 if (!UNDEF_P(r) && RTEST(r)) {
5443 enum defined_type type = (enum defined_type)op_type;
5450 return rb_gvar_defined(SYM2ID(obj));
5452 case DEFINED_CVAR: {
5453 const rb_cref_t *cref = vm_get_cref(GET_EP());
5454 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5459 case DEFINED_CONST_FROM: {
5460 bool allow_nil = type == DEFINED_CONST;
5462 return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
5467 return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
5469 case DEFINED_METHOD:{
5474 switch (METHOD_ENTRY_VISI(me)) {
5475 case METHOD_VISI_PRIVATE:
5477 case METHOD_VISI_PROTECTED:
5481 case METHOD_VISI_PUBLIC:
5485 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
5489 return check_respond_to_missing(obj, v);
5494 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5498 case DEFINED_ZSUPER:
5503 VALUE klass = vm_search_normal_superclass(me->defined_class);
5504 if (!klass) return false;
5506 ID id = me->def->original_id;
5513 return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
5515 rb_bug("unimplemented defined? type (VM)");
5525 return vm_defined(ec, reg_cfp, op_type, obj, v);
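/* vm_get_ep walks lv links of the static environment-pointer chain: level 0
 * is the current frame's EP and each GET_PREV_EP follows one enclosing scope. */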
5528 static const VALUE *
5529 vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5532 const VALUE *ep = reg_ep;
5533 for (i = 0; i < lv; i++) {
5534 ep = GET_PREV_EP(ep);
5540 vm_get_special_object(const VALUE *const reg_ep,
5541 enum vm_special_object_type type)
5544 case VM_SPECIAL_OBJECT_VMCORE:
5545 return rb_mRubyVMFrozenCore;
5546 case VM_SPECIAL_OBJECT_CBASE:
5547 return vm_get_cbase(reg_ep);
5548 case VM_SPECIAL_OBJECT_CONST_BASE:
5549 return vm_get_const_base(reg_ep);
5551 rb_bug("putspecialobject insn: unknown value_type %d", type);
5558 const VALUE ary2 = ary2st;
5559 VALUE tmp1 = rb_check_to_array(ary1);
5560 VALUE tmp2 = rb_check_to_array(ary2);
5580 const VALUE ary2 = ary2st;
5581 VALUE tmp2 = rb_check_to_array(ary2);
5595 return vm_concat_array(ary1, ary2st);
5599 rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
5601 return vm_concat_to_array(ary1, ary2st);
5607 VALUE tmp = rb_check_to_array(ary);
5611 else if (RTEST(flag)) {
5624 return vm_splat_array(flag, ary);
5630 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5632 if (flag & VM_CHECKMATCH_ARRAY) {
5636 for (i = 0; i < n; i++) {
5638 VALUE c = check_match(ec, v, target, type);
5647 return check_match(ec, pattern, target, type);
5654 return vm_check_match(ec, target, pattern, flag);
5658 vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
5660 const VALUE kw_bits = *(ep - bits);
5663 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
5664 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5677 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5678 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5679 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5680 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5684 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5687 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5690 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5693 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5700 vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5705 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5706 return rb_public_const_get_at(cbase, id);
5714 vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
5719 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5724 "superclass mismatch for class %"PRIsVALUE
"",
5737 vm_check_if_module(ID id, VALUE mod)
5756 vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5759 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5767 vm_declare_module(ID id, VALUE cbase)
5773 NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
5780 VALUE location = rb_const_source_location_at(cbase, id);
5781 if (!NIL_P(location)) {
5782 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
5783 " previous definition of %"PRIsVALUE" was here",
5790 vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5794 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
5796 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
5800 vm_check_if_namespace(cbase);
5804 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
5805 if (!vm_check_if_class(id, flags, super, klass))
5806 unmatched_redefinition("class", cbase, id, klass);
5810 return vm_declare_class(id, flags, cbase, super);
5815 vm_define_module(ID id, rb_num_t flags, VALUE cbase)
5819 vm_check_if_namespace(cbase);
5820 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
5821 if (!vm_check_if_module(id, mod))
5822 unmatched_redefinition("module", cbase, id, mod);
5826 return vm_declare_module(id, cbase);
5831 vm_find_or_create_class_by_id(ID id,
5836 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
5839 case VM_DEFINECLASS_TYPE_CLASS:
5841 return vm_define_class(id, flags, cbase, super);
5843 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5847 case VM_DEFINECLASS_TYPE_MODULE:
5849 return vm_define_module(id, flags, cbase);
5852 rb_bug("unknown defineclass type: %d", (int)type);
5856 static rb_method_visibility_t
5861 if (!vm_env_cref_by_cref(cfp->ep)) {
5862 return METHOD_VISI_PUBLIC;
5865 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5874 if (!vm_env_cref_by_cref(cfp->ep)) {
5878 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5886 rb_method_visibility_t visi;
5891 visi = METHOD_VISI_PUBLIC;
5894 klass = CREF_CLASS_FOR_DEFINITION(cref);
5895 visi = vm_scope_visibility_get(ec);
5902 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
5906 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
5909 if (!is_singleton && vm_scope_module_func_check(ec)) {
5911 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5921 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5923 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5924 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
5927 return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
5931 enum method_explorer_type {
5933 mexp_search_invokeblock,
5942 VALUE block_handler,
5943 enum method_explorer_type method_explorer
5948 int argc = vm_ci_argc(ci);
5949 VALUE recv = TOPN(argc);
5951 .block_handler = block_handler,
5952 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5958 switch (method_explorer) {
5959 case mexp_search_method:
5960 calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
5961 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5963 case mexp_search_super:
5964 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5965 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5967 case mexp_search_invokeblock:
5968 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5985 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
5986 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);
5988 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
5990 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
5995 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
5996 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6007 VALUE bh = VM_BLOCK_HANDLER_NONE;
6008 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6023 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6024 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);
6026 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6028 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6033 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
6034 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6045 VALUE bh = VM_BLOCK_HANDLER_NONE;
6046 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6083 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
6089 val = rb_mod_to_s(recv);
6095 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
6096 return rb_nil_to_s(recv);
6100 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
6101 return rb_true_to_s(recv);
6105 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
6106 return rb_false_to_s(recv);
6110 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
6111 return rb_fix_to_s(recv);
6119 vm_opt_ary_freeze(VALUE ary, int bop, ID id)
6121 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6130 vm_opt_hash_freeze(VALUE hash, int bop, ID id)
6132 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6141 vm_opt_str_freeze(VALUE str, int bop, ID id)
6143 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6152 #define id_cmp idCmp
6157 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6163 rb_snum_t i = num - 1;
6166 if (OPTIMIZED_CMP(v, result) > 0) {
6181 return vm_opt_newarray_max(ec, num, ptr);
6187 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6193 rb_snum_t i = num - 1;
6196 if (OPTIMIZED_CMP(v, result) < 0) {
6211 return vm_opt_newarray_min(ec, num, ptr);
6218 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6219 return rb_ary_hash_values(num, ptr);
6229 return vm_opt_newarray_hash(ec, num, ptr);
6238 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6240 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6241 return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
6251 if (!UNDEF_P(buffer)) {
6252 args[1] = rb_hash_new_with_size(1);
6258 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
6265 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
6271 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
6277 vm_track_constant_cache(ID id, void *ic)
6279 struct rb_id_table *const_cache = GET_VM()->constant_cache;
6280 VALUE lookup_result;
6283 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
6287 ics = st_init_numtable();
6288 rb_id_table_insert(const_cache, id, (VALUE)ics);
6291 st_insert(ics, (st_data_t) ic, (st_data_t)Qtrue);
6299 for (int i = 0; segments[i]; i++) {
6300 ID id = segments[i];
6301 if (id == idNULL) continue;
6302 vm_track_constant_cache(id, ic);
6312 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6313 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6315 return (ic_cref == NULL ||
6316 ic_cref == vm_get_cref(reg_ep));
6324 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6325 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6330 rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
6332 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6338 if (ruby_vm_const_missing_count > 0) {
6339 ruby_vm_const_missing_count = 0;
6346 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6351 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6352 rb_yjit_constant_ic_update(iseq, ic, pos);
6353 rb_rjit_constant_ic_update(iseq, ic, pos);
6362 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6365 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6368 ruby_vm_constant_cache_misses++;
6369 val = vm_get_ev_const_chain(ec, segments);
6370 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6373 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6385 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6386 return is->once.value;
6388 else if (is->once.running_thread == NULL) {
6390 is->once.running_thread = th;
6394 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6397 else if (is->once.running_thread == th) {
6399 return vm_once_exec((VALUE)iseq);
6403 RUBY_VM_CHECK_INTS(ec);
6410 vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
6412 switch (OBJ_BUILTIN_TYPE(key)) {
6418 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6419 SYMBOL_REDEFINED_OP_FLAG |
6420 INTEGER_REDEFINED_OP_FLAG |
6421 FLOAT_REDEFINED_OP_FLAG |
6422 NIL_REDEFINED_OP_FLAG |
6423 TRUE_REDEFINED_OP_FLAG |
6424 FALSE_REDEFINED_OP_FLAG |
6425 STRING_REDEFINED_OP_FLAG)) {
6429 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6433 if (rb_hash_stlike_lookup(hash, key, &val)) {
6444 NORETURN(static void
6453 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6454 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6455 static const char stack_consistency_error[] =
6456 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
6457 #if defined RUBY_DEVEL
6463 rb_bug(stack_consistency_error, nsp, nbp);
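/* The vm_opt_* helpers below share one pattern: check operand types
 * (FIXNUM_2_P/FLONUM_2_P/type tests), check the operator is unredefined via
 * BASIC_OP_UNREDEFINED_P, then compute inline; returning Qundef makes the
 * instruction fall back to a full method dispatch. */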
6470 if (FIXNUM_2_P(recv, obj) &&
6471 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6472 return rb_fix_plus_fix(recv, obj);
6474 else if (FLONUM_2_P(recv, obj) &&
6475 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6483 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6488 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6489 return rb_str_opt_plus(recv, obj);
6493 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6504 if (FIXNUM_2_P(recv, obj) &&
6505 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6506 return rb_fix_minus_fix(recv, obj);
6508 else if (FLONUM_2_P(recv, obj) &&
6509 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6517 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6528 if (FIXNUM_2_P(recv, obj) &&
6529 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6530 return rb_fix_mul_fix(recv, obj);
6532 else if (FLONUM_2_P(recv, obj) &&
6533 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6541 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6552 if (FIXNUM_2_P(recv, obj) &&
6553 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6554 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
6556 else if (FLONUM_2_P(recv, obj) &&
6557 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6558 return rb_flo_div_flo(recv, obj);
6565 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6566 return rb_flo_div_flo(recv, obj);
6576 if (FIXNUM_2_P(recv, obj) &&
6577 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6578 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
6580 else if (FLONUM_2_P(recv, obj) &&
6581 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6589 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6600 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6601 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6603 if (!UNDEF_P(val)) {
6604 return RBOOL(!RTEST(val));
6614 if (FIXNUM_2_P(recv, obj) &&
6615 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6618 else if (FLONUM_2_P(recv, obj) &&
6619 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6627 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6639 if (FIXNUM_2_P(recv, obj) &&
6640 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6643 else if (FLONUM_2_P(recv, obj) &&
6644 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6652 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6664 if (FIXNUM_2_P(recv, obj) &&
6665 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6668 else if (FLONUM_2_P(recv, obj) &&
6669 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6677 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6689 if (FIXNUM_2_P(recv, obj) &&
6690 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6693 else if (FLONUM_2_P(recv, obj) &&
6694 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6702 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6719 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6728 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6746 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6757 if (FIXNUM_2_P(recv, obj) &&
6758 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6770 if (FIXNUM_2_P(recv, obj) &&
6771 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6772 return rb_fix_aref(recv, obj);
6777 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6779 return rb_ary_entry_internal(recv, FIX2LONG(obj));
6782 return rb_ary_aref1(recv, obj);
6786 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6801 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6807 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6820 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6821 rb_hash_compare_by_id_p(recv) == Qfalse &&
6822 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6833 return vm_opt_aref_with(recv, key);
6840 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6841 rb_hash_compare_by_id_p(recv) == Qfalse) {
6850 vm_opt_length(VALUE recv, int bop)
6856 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6857 if (bop == BOP_EMPTY_P) {
6865 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6869 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6878 vm_opt_empty_p(VALUE recv)
6880 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6893 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6896 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6912 case RSHIFT(~0UL, 1):
6915 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
6933 vm_opt_succ(VALUE recv)
6936 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
6937 return fix_succ(recv);
6943 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
6954 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
6955 return RBOOL(!RTEST(recv));
6970 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
6974 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
6992 VALUE self = GET_SELF();
6994 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
6996 if (event & global_hooks->events) {
6999 vm_dtrace(event, ec);
7000 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0, val, 0);
7006 if (local_hooks != NULL) {
7007 if (event & local_hooks->events) {
7010 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0, val, 0);
7016 #define VM_TRACE_HOOK(target_event, val) do { \
7017 if ((pc_events & (target_event)) & enabled_flags) { \
7018 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7025 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7026 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7027 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7033 const VALUE *pc = reg_cfp->pc;
7034 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7037 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7043 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7046 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7047 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7051 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7052 enabled_flags |= iseq_local_events;
7054 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7056 if (bmethod_frame) {
7058 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7059 bmethod_local_hooks = me->def->body.bmethod.hooks;
7060 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7061 if (bmethod_local_hooks) {
7062 bmethod_local_events = bmethod_local_hooks->events;
7067 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7071 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7079 else if (ec->trace_arg != NULL) {
7087 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7090 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
7094 (int)rb_iseq_line_no(iseq, pos),
7097 VM_ASSERT(reg_cfp->pc == pc);
7098 VM_ASSERT(pc_events != 0);
7108 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
7109 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
7125 #undef VM_TRACE_HOOK
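/* Stack canary (VM_CHECK_MODE > 0): a random odd word is planted above the
 * live stack; leaf instructions verify it afterwards and a clobbered canary
 * aborts via rb_vm_canary_is_found_dead, naming the offending instruction. */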
7127 #if VM_CHECK_MODE > 0
7128 NORETURN( NOINLINE( COLDFUNC
7129 void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
7132 Init_vm_stack_canary(void)
7135 int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
7136 vm_stack_canary |= 0x01;
7138 vm_stack_canary_was_born = true;
7143 rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
7147 const char *insn = rb_insns_name(i);
7151 rb_bug("dead canary found at %s: %s", insn, str);
7155 void Init_vm_stack_canary(void) { }
7187 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7194 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7201 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7208 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7215 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7222 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7229 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7236 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7243 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7249 typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
7250 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7256 typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
7257 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7263 typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
7264 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7270 typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
7271 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7277 typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
7278 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7284 typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
7285 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7291 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
7292 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
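/* Invoker selection is a constant-time lookup: the builtin's argc indexes
 * straight into a static table of the trampolines above. */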
7297 static builtin_invoker
7298 lookup_builtin_invoker(int argc)
7300 static const builtin_invoker invokers[] = {
7319 return invokers[argc];
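/* For builtins declared leaf, a canary is planted on the VM stack before
 * the call and checked afterwards; a clobbered canary means the builtin
 * broke the leaf contract and rb_vm_canary_is_found_dead() aborts. */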
7325 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7326 SETUP_CANARY(canary_p);
7327 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7328 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7329 CHECK_CANARY(canary_p, BIN(invokebuiltin));
return ret;
7336 return invoke_bf(ec, cfp, bf, argv);
7343 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
7344 for (int i=0; i<bf->argc; i++) {
7345 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7347 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7348 (void *)(uintptr_t)bf->func_ptr);
7351 if (bf->argc == 0) {
7352 return invoke_bf(ec, cfp, bf, NULL);
7355 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7356 return invoke_bf(ec, cfp, bf, argv);
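/* The delegate reads its arguments directly from the frame's locals: ep
 * points at the environment data, so stepping back over local_table_size
 * and VM_ENV_DATA_SIZE slots (+1) lands on the first local, and start_index
 * skips locals that are not forwarded to the builtin. */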
7366 return cfp->ep[index];