11 #include "ruby/internal/config.h"
15 #ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20 #include "debug_counter.h"
22 #include "internal/class.h"
23 #include "internal/compar.h"
24 #include "internal/hash.h"
25 #include "internal/numeric.h"
26 #include "internal/proc.h"
27 #include "internal/random.h"
28 #include "internal/variable.h"
29 #include "internal/struct.h"
34 #include "insns_info.inc"
extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
                                         int argc, const VALUE *argv, int priv);
static VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}
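
/* Raise the pre-allocated SystemStackError kept in the VM's
 * special_exceptions table; when `setup` is true, a backtrace is
 * attached to a fresh copy of the exception. */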
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}
NORETURN(static void vm_stackoverflow(void));

static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
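
/* The consistency checks below are compiled in only when
 * VM_CHECK_MODE > 0; they validate frame-layout invariants before
 * each frame is pushed. */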
#if VM_CHECK_MODE > 0
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    /* ... full class-hierarchy walk elided ... */
#else
    return klass != 0;
#endif
}

static int
callable_method_entry_p(const rb_callable_method_entry_t *cme)
{
    if (cme == NULL) {
        return TRUE;
    }
    else {
        VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));

        if (callable_class_p(cme->defined_class)) {
            return TRUE;
        }
        else {
            return FALSE;
        }
    }
}
static bool
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq) /* argument error */
                  );
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }

    return TRUE;
}
static bool
vm_check_frame(VALUE type, VALUE specval, VALUE cref_or_me, const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK

    return TRUE;
}
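
/* vm_stack_canary is a poison value written just above the stack top so
 * that instructions declared as leaf can be caught if they write to the
 * VM stack; rb_vm_check_canary() reports the offending instruction. */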
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;

/* Return the index of the instruction right before the given pc. */
static unsigned int
previous_insn_index(const rb_iseq_t *iseq, const VALUE *pc)
{
    unsigned int pos = 0;
    while (pos < ISEQ_BODY(iseq)->iseq_size) {
        int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
            return pos;
        }
        pos = next_pos;
    }
    rb_bug("failed to find the previous insn");
}
void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch? */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

    /* rb_bug() is not capable of outputting this much content, so
     * print the details first. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
#if USE_DEBUG_COUNTER
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

        if (prev) {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_R2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_R2C);
            }
        }
        else {
            if (curr) {
                RB_DEBUG_COUNTER_INC(frame_C2R);
            }
            else {
                RB_DEBUG_COUNTER_INC(frame_C2C);
            }
        }
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
#endif
VALUE
rb_vm_stack_canary(void)
{
#if VM_CHECK_MODE > 0
    return vm_stack_canary;
#else
    return 0;
#endif
}

STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
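
/* vm_push_frame(): check invariants, reserve stack room, fill the new
 * locals with Qnil, then store the three env-data slots (me/cref,
 * specval, flags) that ep will point at, and finally publish the new
 * control frame. */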
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack: initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qfalse, T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval;    /* ep[-1] / block handler or prev env ptr */
    *sp   = type;       /* ep[-0] / ENV_FLAGS */

    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp,
        .block_code = NULL,
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
        .jit_return = NULL,
    };

    /* Ensure the frame is fully initialized before it is published. */
#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
#endif

    ec->cfp = cfp;

    vm_push_frame_debug_counter_inc(ec, cfp, type);
}
void
rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
{
    rb_control_frame_t *cfp = ec->cfp;

    if (VMDEBUG == 2) SDR();
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}

/* return TRUE if the frame is finished */
static bool
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}

void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
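
/* Push a pseudo frame whose dummy iseq carries only the given file
 * name, so that C-level code can report a sensible source location. */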
VALUE
rb_vm_push_frame_fname(rb_execution_context_t *ec, VALUE fname)
{
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
    void *ptr = ruby_xcalloc(sizeof(struct rb_iseq_constant_body) + sizeof(struct rb_iseq_struct), 1);
    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    struct rb_iseq_struct *dmy_iseq = (struct rb_iseq_struct *)ptr;
    struct rb_iseq_constant_body *dmy_body = (struct rb_iseq_constant_body *)&dmy_iseq[1];
    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

    vm_push_frame(ec,
                  dmy_iseq, /* const rb_iseq_t *iseq */
                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
                  ec->cfp->self,
                  VM_BLOCK_HANDLER_NONE,
                  Qfalse,      /* VALUE cref_or_me */
                  NULL,        /* const VALUE *pc */
                  ec->cfp->sp,
                  0,           /* int local_size */
                  0);          /* int stack_max */

    return tmpbuf;
}
static VALUE
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
    if (min == max) {
        /* max is not needed */
    }
    else if (max == UNLIMITED_ARGUMENTS) {
        rb_str_cat_cstr(err_mess, "+");
    }
    else {
        rb_str_catf(err_mess, "..%d", max);
    }
    rb_str_cat_cstr(err_mess, ")");
    return rb_exc_new3(rb_eArgError, err_mess);
}
void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember the env value forcibly */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
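
/* Local variable writes take the fast path while the env needs no
 * write barrier; envs flagged VM_ENV_FLAG_WB_REQUIRED go through the
 * slowpath above, which fires the GC write barrier. */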
static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}

void
rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}
VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
#if VM_CHECK_MODE > 0
static int
vm_svar_valid_p(VALUE svar)
{
    if (RB_TYPE_P(svar, T_IMEMO)) {
        switch (imemo_type(svar)) {
          case imemo_svar:
          case imemo_cref:
          case imemo_ment:
            return TRUE;
          default:
            break;
        }
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    return FALSE;
}
#endif
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
static VALUE
lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
{
    const struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;

    switch (key) {
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
      default: {
        const VALUE ary = svar->others;

        if (NIL_P(ary)) {
            return Qnil;
        }
        else {
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
        }
      }
    }
}
static struct vm_svar *
svar_new(VALUE obj)
{
    struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);
    *((VALUE *)&svar->lastline) = Qnil;
    *((VALUE *)&svar->backref) = Qnil;
    *((VALUE *)&svar->others) = Qnil;

    return svar;
}
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}

static inline VALUE
vm_backref_defined(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t type)
{
    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
    int nth = 0;

    if (type & 0x01) {
        switch (type >> 1) {
          case '&':
          case '`':
          case '\'':
          case '+':
            return rb_reg_last_defined(backref);
          default:
            rb_bug("unexpected back-ref");
        }
    }
    else {
        nth = (int)(type >> 1);
    }
    return rb_reg_nth_defined(nth, backref);
}
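
/* The ep[VM_ENV_DATA_INDEX_ME_CREF] slot can hold a method entry, a
 * cref, or an svar wrapping one of those; the helpers below walk up
 * the env chain and extract whichever kind the caller asks for. */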
static rb_callable_method_entry_t *
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
#endif

    switch (imemo_type(obj)) {
      case imemo_ment:
        return (rb_callable_method_entry_t *)obj;
      case imemo_cref:
        return NULL;
      case imemo_svar:
        if (can_be_svar) {
            return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_method_entry: svar should not be there:");
#endif
        return NULL;
    }
}

const rb_callable_method_entry_t *
rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
{
    const VALUE *ep = cfp->ep;
    rb_callable_method_entry_t *me;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
static const rb_iseq_t *
method_entry_iseqptr(const rb_callable_method_entry_t *me)
{
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;
      default:
        return NULL;
    }
}

static rb_cref_t *
method_entry_cref(const rb_callable_method_entry_t *me)
{
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
      default:
        return NULL;
    }
}
#if VM_CHECK_MODE == 0
PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
#endif
static rb_cref_t *
check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

#if VM_CHECK_MODE > 0
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
#endif

    switch (imemo_type(obj)) {
      case imemo_ment:
        return method_entry_cref((rb_callable_method_entry_t *)obj);
      case imemo_cref:
        return (rb_cref_t *)obj;
      case imemo_svar:
        if (can_be_svar) {
            return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
        }
      default:
#if VM_CHECK_MODE > 0
        rb_bug("check_method_entry: svar should not be there:");
#endif
        return NULL;
    }
}
static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
static int
is_cref(const VALUE v, int can_be_svar)
{
    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            return TRUE;
          case imemo_svar:
            if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
          default:
            return FALSE;
        }
    }
    return FALSE;
}

static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;
    rb_cref_t *cref, *new_cref;

    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            cref = (rb_cref_t *)v;
            new_cref = vm_cref_dup(cref);
            if (parent) {
                RB_OBJ_WRITE(parent, vptr, new_cref);
            }
            else {
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            }
            return (rb_cref_t *)new_cref;
          case imemo_svar:
            if (can_be_svar) {
                return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            }
            /* fall through */
          case imemo_ment:
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
          default:
            break;
        }
    }
    return NULL;
}
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
static rb_cref_t *
vm_get_cref(const VALUE *ep)
{
    rb_cref_t *cref = vm_env_cref(ep);

    if (cref != NULL) {
        return cref;
    }
    else {
        rb_bug("vm_get_cref: unreachable");
    }
}

rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}

static rb_cref_t *
vm_ec_cref(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (cfp == NULL) {
        return NULL;
    }
    return vm_get_cref(cfp->ep);
}
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            RCLASS_EXT(CREF_CLASS(cref))->cloned) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    }
    *new_cref_ptr = NULL;
}
static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
    rb_cref_t *prev_cref = NULL;

    if (ep) {
        prev_cref = vm_env_cref(ep);
    }
    else {
        rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);

        if (cfp) {
            prev_cref = vm_env_cref(cfp->ep);
        }
    }

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}
static inline VALUE
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}
static inline VALUE
vm_get_const_base(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    while (cref) {
        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        }
        cref = CREF_NEXT(cref);
    }

    return Qundef;
}
static inline void
vm_check_if_namespace(VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
    }
}

static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;

    if (NIL_P(orig_klass) && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (UNDEF_P(val)) {
                        if (am == klass) break;
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                                             rb_class_path(klass), rb_id2name(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }

        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}

VALUE
rb_vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, VALUE allow_nil)
{
    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
}
static inline VALUE
vm_get_ev_const_chained(rb_execution_context_t *ec, const ID *segments)
{
    VALUE val = Qnil;
    int idx = 0;
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {
        val = rb_cObject;
        idx++;
        allow_nil = FALSE;
    }
    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        allow_nil = FALSE;
    }
    return val;
}
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError,
                 "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}
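
/* Instance variable reads are cached by shape id. On a hit the cached
 * attr index is used directly; on a miss the shape tree is consulted
 * and the cache refilled (an IC for getinstancevariable, a CC for
 * attr_reader). */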
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
#define ATTR_INDEX_NOT_SET (attr_index_t)-1

static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, VALUE default_value)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;
    shape_id_t shape_id;
    VALUE *ivar_list;

    if (SPECIAL_CONST_P(obj)) {
        return default_value;
    }

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);
#endif

    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
#endif
        ivar_list = ROBJECT_IVPTR(obj);
        break;
      case T_CLASS:
      case T_MODULE:
        {
            if (UNLIKELY(!rb_ractor_main_p())) {
                /* only the main ractor may take the fast path here */
                if (default_value == Qundef) { /* defined? */
                    return rb_ivar_defined(obj, id) ? Qtrue : Qundef;
                }
                else {
                    goto general_path;
                }
            }

            ivar_list = RCLASS_IVPTR(obj);

#if !SHAPE_IN_BASIC_FLAGS
            shape_id = RCLASS_SHAPE_ID(obj);
#endif
            break;
        }
      default:
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
            shape_id = ivtbl->shape_id;
#endif
            ivar_list = ivtbl->as.shape.ivptr;
        }
        else {
            return default_value;
        }
    }

    shape_id_t cached_id;
    attr_index_t index;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    if (LIKELY(cached_id == shape_id)) {
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        if (RB_TYPE_P(obj, T_OBJECT)) {
            RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
        }
#endif
        RUBY_ASSERT(!UNDEF_P(val));
    }
    else { /* cache miss case */
#if USE_DEBUG_COUNTER
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        if (RB_TYPE_P(obj, T_OBJECT)) {
            RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
        }
#endif

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
            st_table *table = NULL;
            switch (BUILTIN_TYPE(obj)) {
              case T_CLASS:
              case T_MODULE:
                table = (st_table *)RCLASS_IVPTR(obj);
                break;

              case T_OBJECT:
                table = ROBJECT_IV_HASH(obj);
                break;

              default: {
                struct gen_ivtbl *ivtbl;
                if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
                    table = ivtbl->as.complex.table;
                }
                break;
              }
            }

            if (!table || !st_lookup(table, id, &val)) {
                val = default_value;
            }
        }
        else {
            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {
                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
                }

                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;
                }
                else {
                    /* we fetched the ivar list above */
                    val = ivar_list[index];
                    RUBY_ASSERT(!UNDEF_P(val));
                }
            }
            else {
                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }

                val = default_value;
            }
        }
    }

    if (!UNDEF_P(default_value)) {
        RUBY_ASSERT(!UNDEF_P(val));
    }

    return val;

  general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}
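
/* Writes use the same shape-based cache: if the object's shape already
 * equals the cached destination shape, the ivar slot is written in
 * place; if the cached transition (parent shape + edge_name == id)
 * applies, the object's shape id is bumped first; otherwise we fall
 * back to the slowpath, which may also repopulate the cache. */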
static inline void
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

    /* cache population code */
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, next_shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
    }
}
ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
static VALUE
vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

    if (BUILTIN_TYPE(obj) == T_OBJECT) {
        rb_check_frozen(obj);

        attr_index_t index = rb_obj_ivar_set(obj, id, val);

        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);

        if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
        }

        RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
        return val;
    }
#endif
    return rb_ivar_set(obj, id, val);
}

NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}

NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
#else
    shape_id_t shape_id = rb_generic_shape_id(obj);
#endif

    struct gen_ivtbl *ivtbl = 0;

    /* cache hit case */
    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
        rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);

        if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
            RUBY_ASSERT(index < dest_shape->capacity);
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    rb_gen_ivtbl_get(obj, 0, &ivtbl);

    if (shape_id != dest_shape_id) {
#if SHAPE_IN_BASIC_FLAGS
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
#else
        ivtbl->shape_id = dest_shape_id;
#endif
    }

    RB_OBJ_WRITE(obj, &ivtbl->as.shape.ivptr[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if OPT_IC_FOR_IVAR
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        {
            VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));

            shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
            RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

            if (LIKELY(shape_id == dest_shape_id)) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
                VM_ASSERT(!rb_ractor_shareable_p(obj));
            }
            else if (dest_shape_id != INVALID_SHAPE_ID) {
                rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
                rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
                shape_id_t source_shape_id = dest_shape->parent_id;

                if (shape_id == source_shape_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
                    RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                    ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);

                    RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);
                    RUBY_ASSERT(index < dest_shape->capacity);
                }
                else {
                    break;
                }
            }
            else {
                break;
            }

            VALUE *ptr = ROBJECT_IVPTR(obj);

            RB_OBJ_WRITE(obj, &ptr[index], val);

            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
            RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
            return val;
        }
      case T_CLASS:
      case T_MODULE:
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
      default:
        break;
    }

    return Qundef;
#endif /* OPT_IC_FOR_IVAR */
}
static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, const rb_cref_t *cref, ICVARC ic)
{
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    ent->cref = cref;
    ic->entry = ent;

    RB_OBJ_WRITTEN(iseq, Qundef, ent->cref);
    RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
    RB_OBJ_WRITTEN(ent->class_value, Qundef, ent->cref);

    return cvar_value;
}
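
/* Class variable access caches the defining class: a cache entry is
 * valid while the global cvar state is unchanged and the cref matches,
 * which avoids walking the class hierarchy on every access. */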
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
        RUBY_ASSERT(!UNDEF_P(v));

        return v;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);
}

VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    return vm_getclassvariable(iseq, cfp, id, ic);
}
static inline void
vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);
        return;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    rb_cvar_set(klass, id, val);

    update_classvariable_cache(iseq, klass, id, cref, ic);
}

void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
    vm_setclassvariable(iseq, cfp, id, val, ic);
}
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
}

static inline void
vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    shape_id_t dest_shape_id;
    attr_index_t index;
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            break;
          default:
            if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
                return;
            }
        }
        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
    }
}

void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
}
static VALUE
vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
{
    /* continue throw */

    if (FIXNUM_P(err)) {
        ec->tag->state = RUBY_TAG_FATAL;
    }
    else if (SYMBOL_P(err)) {
        ec->tag->state = TAG_THROW;
    }
    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
    }
    else {
        ec->tag->state = TAG_RAISE;
    }
    return err;
}
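
/* vm_throw_start() resolves break/retry/return to the control frame
 * that must catch it; a break escaping its proc or an unexpected
 * return raises a LocalJumpError instead. */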
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t *const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        /* find target_lep, target_ep */
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL: {
                        const rb_iseq_t *is = escape_cfp->iseq;
                        enum rb_iseq_type t = ISEQ_BODY(is)->type;
                        while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                            if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                            t = ISEQ_BODY(is)->type;
                        }
                        toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
                        break;
                      }
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
static VALUE
vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

    if (state != 0) {
        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
    }
    else {
        return vm_throw_continue(ec, throwobj);
    }
}

VALUE
rb_vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t throw_state, VALUE throwobj)
{
    return vm_throw(ec, reg_cfp, throw_state, throwobj);
}
static inline void
vm_expandarray(struct rb_control_frame_struct *cfp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY)) {
        ary = rb_ary_to_ary(ary);
    }
    ptr = RARRAY_CONST_PTR(ary);
    len = (rb_num_t)RARRAY_LEN(ary);

    if (num + is_splat == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            for (i = 0; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }
        }

        for (j = 0; i < num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *cfp->sp++ = v;
        }

        if (is_splat) {
            *cfp->sp++ = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        if (is_splat) {
            if (num > len) {
                *cfp->sp++ = rb_ary_new();
            }
            else {
                *cfp->sp++ = rb_ary_new4(len - num, ptr + num);
            }
        }

        if (num > len) {
            rb_num_t i = 0;
            for (; i < num - len; i++) {
                *cfp->sp++ = Qnil;
            }

            for (rb_num_t j = 0; i < num; i++, j++) {
                *cfp->sp++ = ptr[len - j - 1];
            }
        }
        else {
            for (rb_num_t j = 0; j < num; j++) {
                *cfp->sp++ = ptr[num - j - 1];
            }
        }
    }

    RB_GC_GUARD(obj);
}
static struct rb_class_cc_entries *
vm_ccs_create(VALUE klass, struct rb_id_table *cc_tbl, ID mid, const rb_callable_method_entry_t *cme)
{
    struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;
#endif
    ccs->capa = 0;
    ccs->len = 0;
    ccs->cme = cme;
    METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
    ccs->entries = NULL;

    rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    RB_OBJ_WRITTEN(klass, Qundef, cme);
    return ccs;
}
static void
vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    if (! vm_cc_markable(cc)) {
        return;
    }

    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {
            ccs->capa = 1;
            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
        }
        else {
            ccs->capa *= 2;
            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
        }
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);
    RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
        /* for tuning */
        /* vm_mtbl_dump(klass, 0); */
    }
}
#if VM_CHECK_MODE > 0
void
rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
{
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);
    }
}

static int
vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
{
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callcache *cc = ccs->entries[i].cc;

        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    }
    return TRUE;
}
#endif
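
/* Search the per-class cc_tbl (mid -> rb_class_cc_entries) for a call
 * cache matching the call info's argc and flags. Entries whose method
 * entry was invalidated are discarded; on a miss a new cc is created
 * and pushed onto the ccs. */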
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo *const ci)
{
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_data;

    if (cc_tbl) {
        /* CCS data is keyed on method id, so the method id needs no
         * comparison in the loop below. */
        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            ccs = (struct rb_class_cc_entries *)ccs_data;
            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                ccs = NULL;
            }
            else {
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                /* only argc and flag need to be checked */
                unsigned int argc = vm_ci_argc(ci);
                unsigned int flag = vm_ci_flag(ci);

                for (int i=0; i<ccs_len; i++) {
                    unsigned int ccs_ci_argc = ccs->entries[i].argc;
                    unsigned int ccs_ci_flag = ccs->entries[i].flag;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    else {
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme;

    if (ccs) {
        cme = ccs->cme;
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
    }
    else {
        cme = rb_callable_method_entry(klass, mid);
    }

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        /* undef or not found: can't cache the information */
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    if (ccs == NULL) {
        VM_ASSERT(cc_tbl != NULL);

        if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
            /* rb_callable_method_entry() prepares ccs. */
            ccs = (struct rb_class_cc_entries *)ccs_data;
        }
        else {
            ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
        }
    }

    cme = rb_check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    return cc;
}

const struct rb_callcache *
rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
{
    const struct rb_callcache *cc;

    RB_VM_LOCK_ENTER();
    {
        cc = vm_search_cc(klass, ci);

        VM_ASSERT(cc);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
        VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
    }
    RB_VM_LOCK_LEAVE();

    return cc;
}
static const struct rb_callcache *
vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc = &vm_empty_cc;
    if (cd_owner && cc != empty_cc) {
        RB_OBJ_WRITTEN(cd_owner, Qundef, cc);
    }

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {
        /* empty */
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif
#endif /* OPT_INLINE_METHOD_CACHE */

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

    return cc;
}

static const struct rb_callcache *
vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
    const struct rb_callcache *cc = cd->cc;

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        /* not found */
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||         /* search_super w/ define_method */
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
}

static const struct rb_callcache *
vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
{
    VALUE klass = CLASS_OF(recv);
    VM_ASSERT(klass != Qfalse);

    return vm_search_method_fastpath(cd_owner, cd, klass);
}
#if __has_attribute(transparent_union)
typedef union {
    VALUE (*anyargs)(ANYARGS);
    VALUE (*f00)(VALUE);
    VALUE (*f01)(VALUE, VALUE);
    VALUE (*f02)(VALUE, VALUE, VALUE);
    VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
    VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f08)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f09)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
} __attribute__((__transparent_union__)) cfunc_type;
# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
#else
typedef VALUE (*cfunc_type)(ANYARGS);
# define make_cfunc_type(f) (cfunc_type)(f)
#endif
static inline bool
check_cfunc(const rb_callable_method_entry_t *me, cfunc_type func)
{
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {
        return false;
    }
    else {
#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
#else
        return me->def->body.cfunc.func == func;
#endif
    }
}

static inline bool
check_method_basic_definition(const rb_callable_method_entry_t *me)
{
    return me && METHOD_ENTRY_BASIC(me);
}

static inline bool
vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
{
    VM_ASSERT(iseq != NULL);
    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
    return check_cfunc(vm_cc_cme(cc), func);
}

#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)

static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        /* fall through to the generic path */
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

#if MSC_VERSION_BEFORE(1300)
        if (isnan(a)) {
            return Qfalse;
        }
        else if (isnan(b)) {
            return Qfalse;
        }
        else
#endif
        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
static VALUE
opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
{
    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
        return Qundef;
    }
    else {
        return RBOOL(recv == obj);
    }
}

#undef EQ_UNREDEFINED_P
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));

static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
{
    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {
        return val;
    }
    else {
        return opt_equality_by_mid_slowpath(recv, obj, mid);
    }
}

VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEq);
}

VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEqlP);
}
static VALUE
check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
{
    switch (type) {
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
    }
}
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b) /* do nothing */
#endif

static inline VALUE
double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}

static inline VALUE
double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}

static inline VALUE
double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}

static inline VALUE
double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;

            /* skip EP stuff; the CI is stored as the last local */
            CALL_INFO ci = (CALL_INFO)cfp->ep[-(VM_ENV_DATA_SIZE + (lts - params))];
            bp += vm_ci_argc(ci);
        }

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
}

VALUE *
rb_vm_base_ptr(const rb_control_frame_t *cfp)
{
    return vm_base_ptr(cfp);
}
#include "vm_args.c"
static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                                         int opt_pc, int param_size, int local_size);
ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                                                     const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
static VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                                         int opt_pc);
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);

static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
}

static VALUE
vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
bool
rb_simple_iseq_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

static bool
rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

static bool
rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
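
/* Splat arguments are normally copied onto the VM stack. When the
 * callee allows it (max_args <= ALLOW_HEAP_ARGV), a large splat is
 * kept in a hidden array (calling->heap_argv) instead, which avoids
 * overflowing the VM stack. */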
static inline bool
vm_caller_setup_arg_splat(rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE ary, int max_args)
{
    vm_check_canary(GET_EC(), cfp->sp);
    bool ret = false;

    if (!NIL_P(ary)) {
        const VALUE *ptr = RARRAY_CONST_PTR(ary);
        long len = RARRAY_LEN(ary);
        int argc = calling->argc;

        if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
            /* Avoid SystemStackError when splatting large arrays by
             * storing arguments in a temporary array instead of trying
             * to keep them on the VM stack. */
            VALUE *argv = cfp->sp - argc;
            VALUE argv_ary = rb_ary_hidden_new(len + argc + 1);
            rb_ary_cat(argv_ary, argv, argc);
            rb_ary_cat(argv_ary, ptr, len);
            cfp->sp -= argc - 1;
            cfp->sp[-1] = argv_ary;
            calling->argc = 1;
            calling->heap_argv = argv_ary;
            RB_GC_GUARD(ary);
        }
        else {
            long i;

            if (max_args >= 0 && len + argc > max_args) {
                /* only max_args arguments are allowed; copy up to that many */
                calling->argc += len - (max_args - argc + 1);
                len = max_args - argc + 1;
                ret = true;
            }
            else {
                /* Unset heap_argv if set originally.  Can happen when
                 * forwarding modified arguments, where heap_argv was
                 * used originally but is not supported by the
                 * forwarded method in all cases. */
                calling->heap_argv = 0;
            }
            CHECK_VM_STACK_OVERFLOW(cfp, len);

            for (i = 0; i < len; i++) {
                *cfp->sp++ = ptr[i];
            }
            calling->argc += i;
        }
    }

    return ret;
}
static inline void
vm_caller_setup_arg_kw(rb_control_frame_t *cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci)
{
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;
    int i;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }
    (sp - kw_len)[0] = h;

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;
}
static inline VALUE
vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
{
    if (UNLIKELY(!RB_TYPE_P(keyword_hash, T_HASH))) {
        if (keyword_hash != Qnil) {
            /* Convert a non-hash keyword splat to a new hash */
            keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
        }
    }
    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {
        /* Convert a hash keyword splat to a new hash unless a mutable
         * keyword splat was passed.  Skip the allocation for an empty
         * keyword splat, as it will be ignored by both callers. */
        keyword_hash = rb_hash_dup(keyword_hash);
    }
    return keyword_hash;
}
static inline void
CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
                 struct rb_calling_info *restrict calling,
                 const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {
            /* f(*a, **kw) */
            VM_ASSERT(calling->kw_splat == 1);

            cfp->sp -= 2;
            calling->argc -= 2;
            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            /* splat a */
            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            /* put kw */
            if (kwh != Qnil && !RHASH_EMPTY_P(kwh)) {
                if (UNLIKELY(calling->heap_argv)) {
                    rb_ary_push(calling->heap_argv, kwh);
                    ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                    if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                        calling->kw_splat = 0;
                    }
                }
                else {
                    cfp->sp[0] = kwh;
                    cfp->sp++;
                    calling->argc++;

                    VM_ASSERT(calling->kw_splat == 1);
                }
            }
            else {
                calling->kw_splat = 0;
            }
        }
        else {
            /* f(*a) */
            VM_ASSERT(calling->kw_splat == 0);

            cfp->sp -= 1;
            calling->argc -= 1;
            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
                goto check_keyword;
            }

            /* check the last argument */
            VALUE last_hash, argv_ary;
            if (UNLIKELY(argv_ary = calling->heap_argv)) {
                if (!IS_ARGS_KEYWORD(ci) &&
                    RARRAY_LEN(argv_ary) > 0 &&
                    RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)), T_HASH) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    rb_ary_pop(argv_ary);
                    if (!RHASH_EMPTY_P(last_hash)) {
                        rb_ary_push(argv_ary, rb_hash_dup(last_hash));
                        calling->kw_splat = 1;
                    }
                }
            }
            else {
              check_keyword:
                if (!IS_ARGS_KEYWORD(ci) &&
                    calling->argc > 0 &&
                    RB_TYPE_P((last_hash = cfp->sp[-1]), T_HASH) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    if (RHASH_EMPTY_P(last_hash)) {
                        calling->argc--;
                        cfp->sp -= 1;
                    }
                    else {
                        cfp->sp[-1] = rb_hash_dup(last_hash);
                        calling->kw_splat = 1;
                    }
                }
            }
        }
    }
    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
        /* f(**kw) */
        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

        if (kwh == Qnil || RHASH_EMPTY_P(kwh)) {
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
        else {
            cfp->sp[-1] = kwh;
        }
    }
    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
        /* f(k1:1, k2:2) */
        VM_ASSERT(calling->kw_splat == 0);

        /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
         * by creating a keyword hash, so vm_ci_flag(ci) & VM_CALL_KWARG
         * is now inconsistent. */
        vm_caller_setup_arg_kw(cfp, calling, ci);
    }
}
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

__attribute__((destructor))
static void
opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif

static VALUE
vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                    struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
}

static VALUE
vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                      struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
}
static void
args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
                         VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                         VALUE *const locals);

static VALUE
vm_call_iseq_forwardable(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                         struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    /* setting up local size and param size */
    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    cfp->sp[0] = (VALUE)calling->cd->ci;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
}
static VALUE
vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    VALUE *const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
static VALUE
vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }
    klocals[i] = INT2FIX(0); /* kw specify flag */

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
static VALUE
vm_call_single_noarg_leaf_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_builtin_function *bf = calling->cc->aux_.bf;
    cfp->sp -= (calling->argc + 1);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
}
static void
warn_unused_block(const rb_callable_method_entry_t *cme, const rb_iseq_t *iseq, void *pc)
{
    rb_vm_t *vm = GET_VM();
    st_table *dup_check_table = vm->unused_block_warning_table;
    st_data_t key;
    bool strict_unused_block = rb_warning_category_enabled_p(RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK);

    union {
        VALUE v;
        unsigned char b[SIZEOF_VALUE];
    } k1 = {
        .v = (VALUE)pc,
    }, k2 = {
        .v = (VALUE)cme->def,
    };

    /* relaxed check */
    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (st_lookup(dup_check_table, key, NULL)) {
            return;
        }
    }

    /* strict check: make a unique key from pc and cme->def pointer */
    key = 0;
    for (int i=0; i<SIZEOF_VALUE; i++) {
        key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);
    }

    if (0) {
        fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
        fprintf(stderr, "key:%p\n", (void *)key);
    }

    /* duplication check */
    if (st_insert(dup_check_table, key, 1)) {
        /* already shown */
    }
    else if (rb_warning_category_enabled_p(RB_WARN_CATEGORY_PERFORMANCE) || strict_unused_block) {
        VALUE m_loc = rb_method_entry_location((const rb_method_entry_t *)cme);
        VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);

        if (!NIL_P(m_loc)) {
            rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",
                    name, RARRAY_AREF(m_loc, 0), RARRAY_AREF(m_loc, 1));
        }
        else {
            rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
        }
    }
}
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);
    }

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            rb_control_frame_t *cfp = ec->cfp;
            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
            }

            VM_ASSERT(ci == calling->cd->ci);
            VM_ASSERT(cc == calling->cc);

            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
                    !(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) {
                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);
                }
                else {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
                }
            }
            return 0;
        }
        else if (rb_iseq_only_optparam_p(iseq)) {
            rb_control_frame_t *cfp = ec->cfp;

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;
                    VALUE *const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }

    /* a forwardable iseq cannot use the cached fast path as-is */
    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
            struct rb_forwarding_call_data *forward_cd = (struct rb_forwarding_call_data *)calling->cd;
            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(
                        vm_ci_mid(ci),
                        vm_ci_flag(ci),
                        vm_ci_argc(ci),
                        vm_ci_kwarg(ci));
            }
            else {
                ci = forward_cd->caller_ci;
            }
            can_fastpath = false;
        }
        /* C functions calling iseqs will stack-allocate a CI,
         * so we need to convert it to a heap allocated one */
        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(
                    vm_ci_mid(ci),
                    vm_ci_flag(ci),
                    vm_ci_argc(ci),
                    vm_ci_kwarg(ci));
            can_fastpath = false;
        }
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
        return 0;
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
static void
vm_adjust_stack_forwarding(const struct rb_execution_context_struct *ec, struct rb_control_frame_struct *cfp, int argc, VALUE splat)
{
    /* The caller is using a `...` parameter (VM_CALL_FORWARDING), so
     * the caller's caller's CI is on the stack and its arguments must
     * be copied down. */
    const VALUE *lep = VM_CF_LEP(cfp);

    const rb_iseq_t *iseq;

    /* If we're in an escaped environment (a lambda for example), get
     * the iseq from the captured env. */
    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
        rb_env_t *env = (rb_env_t *)lep[VM_ENV_DATA_INDEX_ENV];
        iseq = env->iseq;
    }
    else { /* otherwise use the lep to find the caller */
        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
    }

    /* Our local storage is below the args we need to copy */
    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;

    const VALUE *from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
    VALUE *to = cfp->sp - 1; /* clobber the CI */

    if (RTEST(splat)) {
        to -= 1; /* clobber the splat array */
        CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));
        MEMCPY(to, RARRAY_CONST_PTR(splat), VALUE, RARRAY_LEN(splat));
        to += RARRAY_LEN(splat);
    }

    CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
    MEMCPY(to, from, VALUE, argc);
    cfp->sp = to + argc;
}
static VALUE
vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
}
static VALUE
vm_call_iseq_fwd_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const struct rb_callcache *cc = calling->cc;
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    /* setting up local size and param size */
    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
}
static inline VALUE
vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                     int opt_pc, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
    }
    else {
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
    }
}
static inline VALUE
vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1 /* recv */;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    return Qundef;
}
static inline VALUE
vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                            int opt_pc)
{
    unsigned int i;
    VALUE *argv = cfp->sp - calling->argc;
    const rb_callable_method_entry_t *me = vm_cc_cme(calling->cc);
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);
    cfp = ec->cfp;

    sp_orig = sp = cfp->sp;

    /* push self */
    sp[0] = calling->recv;
    sp++;

    /* copy arguments */
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);

    cfp->sp = sp_orig;

    return Qundef;
}
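
/* call_cfunc_*: one trampoline per arity that casts the stored ANYARGS
 * function pointer back to its real signature before calling it. The
 * plain variants first run ractor_unsafe_check(); the ractor_safe_*
 * variants further below are used for methods registered as
 * Ractor-safe. */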
static inline void
ractor_unsafe_check(void)
{
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    }
}
static VALUE
call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    return (*func)(recv, rb_ary_new4(argc, argv));
}

static VALUE
call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}

static VALUE
call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}

static VALUE
call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}

static VALUE
call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}

static VALUE
call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}

static VALUE
call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
return (*f)(argc, argv, recv);

return (*f)(recv, argv[0]);

return (*f)(recv, argv[0], argv[1]);

return (*f)(recv, argv[0], argv[1], argv[2]);

return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
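/*
 * The invokers above are arity-specialized trampolines: each casts the
 * stored `func` pointer back to its concrete signature (receiver plus
 * N argument VALUEs) and spreads `argv` into positional arguments,
 * e.g. for arity 2:
 *
 *   VALUE (*f)(VALUE, VALUE, VALUE) = (VALUE (*)(VALUE, VALUE, VALUE))func;
 *   return (*f)(recv, argv[0], argv[1]);
 *
 * The first family calls ractor_unsafe_check() before dispatch so that
 * C methods not marked ractor-safe raise when invoked from a non-main
 * ractor; the second family is used when that check is not needed.
 */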
const int ov_flags = RAISED_STACKOVERFLOW;
if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
if (rb_ec_raised_p(ec, ov_flags)) {
    rb_ec_raised_reset(ec, ov_flags);

#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
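/*
 * The consistency check above: after a C method returns, the current
 * control frame must sit exactly one slot above the caller's reg_cfp;
 * the only tolerated exception is a pending stack-overflow flag, which
 * is consumed via rb_ec_raised_reset().  CHECK_CFP_CONSISTENCY turns a
 * violation into an rb_bug() with both frame pointers.
 */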
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:

# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ATTRSET);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);

        rb_bug("wrong method type: %d", me->def->type);

    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);

VALUE recv = calling->recv;
VALUE block_handler = calling->block_handler;
VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

if (UNLIKELY(calling->kw_splat)) {
    frame_type |= VM_FRAME_FLAG_CFRAME_KW;

VM_ASSERT(reg_cfp == ec->cfp);

RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);

vm_push_frame(ec, NULL, frame_type, recv,
              block_handler, (VALUE)me,
              0, ec->cfp->sp, 0, 0);

int len = cfunc->argc;

reg_cfp->sp = stack_bottom;
val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

CHECK_CFP_CONSISTENCY("vm_call_cfunc");

rb_vm_pop_frame(ec);

VM_ASSERT(ec->cfp->sp == stack_bottom);

EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
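/*
 * Summary of the cfunc call protocol visible above: push a
 * VM_FRAME_MAGIC_CFUNC frame (tagged CFRAME_KW when a kw-splat is
 * pending), invoke the C function through cfunc->invoker with the
 * stack pointer lowered to stack_bottom, verify cfp consistency, pop
 * the frame, and fire the C_RETURN event hooks.
 */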
VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);

VALUE *sp = ec->cfp->sp;
VALUE recv = *(sp - recv_idx - 1);
VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
VALUE block_handler = VM_BLOCK_HANDLER_NONE;
#if VM_CHECK_MODE > 0

*(GET_EC()->cfp->sp) = Qfalse;

vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);

rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
int argc = calling->argc;
VALUE *stack_bottom = reg_cfp->sp - argc - 1;
VALUE *argv = &stack_bottom[1];

return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);

RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);

if (UNLIKELY(argv_ary = calling->heap_argv)) {
    VM_ASSERT(!IS_ARGS_KEYWORD(ci));

    VALUE *stack_bottom = reg_cfp->sp - 2;

    VM_ASSERT(calling->argc == 1);

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);

CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));

return vm_call_cfunc_with_frame(ec, reg_cfp, calling);

VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];

if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
    return vm_call_cfunc_other(ec, reg_cfp, calling);

calling->kw_splat = 0;

VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
VALUE *sp = stack_bottom;
CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
for (i = 0; i < argc; i++) {

return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);

RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
VALUE argv_ary = reg_cfp->sp[-1];

int argc_offset = 0;

if (UNLIKELY(argc > 0 &&
             (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
    return vm_call_cfunc_other(ec, reg_cfp, calling);

return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);

RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
VALUE keyword_hash = reg_cfp->sp[-1];

return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);

return vm_call_cfunc_other(ec, reg_cfp, calling);

RB_DEBUG_COUNTER_INC(ccf_cfunc);

if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
    if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
        return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
    }
    if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
        return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
    }
}

CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
return vm_call_cfunc_other(ec, reg_cfp, calling);
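/*
 * Dispatch policy for cfuncs: a plain one-element splat and the
 * splat-plus-kw-splat shape each get a dedicated fastpath handler
 * installed via CC_SET_FASTPATH; every other argument shape falls
 * back to vm_call_cfunc_other, which normalizes arguments first.
 */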
RB_DEBUG_COUNTER_INC(ccf_ivar);

VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);

RB_DEBUG_COUNTER_INC(ccf_attrset);
VALUE val = *(cfp->sp - 1);

attr_index_t index = vm_cc_attr_index(cc);
shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
ID id = vm_cc_cme(cc)->def->body.attr.id;
rb_check_frozen(obj);
VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);

res = vm_setivar_default(obj, id, val, dest_shape_id, index);
if (!UNDEF_P(res)) {

res = vm_setivar_slowpath_attr(obj, id, val, cc);

return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
VALUE procv = cme->def->body.bmethod.proc;

    cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {

GetProcPtr(procv, proc);
val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

VALUE procv = cme->def->body.bmethod.proc;

    cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {

GetProcPtr(procv, proc);
const struct rb_block *block = &proc->block;

while (vm_block_type(block) == block_type_proc) {
    block = vm_proc_block(block->as.proc);

VM_ASSERT(vm_block_type(block) == block_type_iseq);

const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
VALUE *const argv = cfp->sp - calling->argc;
const int arg_size = ISEQ_BODY(iseq)->param.size;

if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
    opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);

    opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);

vm_push_frame(ec, iseq,
              VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
              VM_GUARDED_PREV_EP(captured->ep),
              ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
              ISEQ_BODY(iseq)->local_table_size - arg_size,
              ISEQ_BODY(iseq)->stack_max);

RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
if (UNLIKELY(calling->heap_argv)) {

argc = calling->argc;

cfp->sp += - argc - 1;

return vm_call_bmethod_body(ec, calling, argv);

RB_DEBUG_COUNTER_INC(ccf_bmethod);

VALUE procv = cme->def->body.bmethod.proc;

GetProcPtr(procv, proc);
const struct rb_block *block = &proc->block;

while (vm_block_type(block) == block_type_proc) {
    block = vm_proc_block(block->as.proc);

if (vm_block_type(block) == block_type_iseq) {
    CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
    return vm_call_iseq_bmethod(ec, cfp, calling);
}

CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
return vm_call_noniseq_bmethod(ec, cfp, calling);
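/*
 * bmethod dispatch: the proc backing a define_method() method is
 * unwrapped (block_type_proc links are followed) and, when an iseq
 * block is found, calls are routed to vm_call_iseq_bmethod, which
 * pushes a BLOCK|BMETHOD|LAMBDA frame directly; any other block kind
 * goes through the slower vm_call_noniseq_bmethod path.
 */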
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    while (RTEST(klass)) {

        if (owner == target_owner) {

    return current_class;

if (orig_me->defined_class == 0) {
    VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
    VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
    cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

    if (me->def->reference_count == 1) {
        RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);

        rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);

VM_ASSERT(callable_method_entry_p(cme));

return aliased_callable_method_entry(me);

calling->cc = &VM_CC_ON_STACK(Qundef,
                              aliased_callable_method_entry(vm_cc_cme(calling->cc)));

return vm_call_method_each_type(ec, cfp, calling);
static enum method_missing_reason

    enum method_missing_reason stat = MISSING_NOENTRY;
    if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
    if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
    if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;

ASSUME(calling->argc >= 0);

enum method_missing_reason missing_reason = MISSING_NOENTRY;
int argc = calling->argc;
VALUE recv = calling->recv;

flags |= VM_CALL_OPT_SEND;

if (UNLIKELY(! mid)) {
    mid = idMethodMissing;
    missing_reason = ci_missing_reason(ci);
    ec->method_missing_reason = missing_reason;

if (UNLIKELY(argv_ary = calling->heap_argv)) {

    int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
    VALUE exc = rb_make_no_method_exception(

CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);

argc = ++calling->argc;

    int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
    const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
    VALUE exc = rb_make_no_method_exception(

.ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),

if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
    calling->cd = &new_fcd.cd;

    VM_ASSERT((vm_ci_argc(caller_ci), 1));
    new_fcd.caller_ci = caller_ci;

calling->cc = &VM_CC_ON_STACK(klass,
                              { .method_missing_reason = missing_reason },
                              rb_callable_method_entry_with_refinements(klass, mid, NULL));

if (flags & VM_CALL_FCALL) {
    return vm_call_method(ec, reg_cfp, calling);
VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

if (vm_cc_cme(cc) != NULL) {
    switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
      case METHOD_VISI_PUBLIC:
        return vm_call_method_each_type(ec, reg_cfp, calling);
      case METHOD_VISI_PRIVATE:
        vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
      case METHOD_VISI_PROTECTED:
        vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);

        VM_UNREACHABLE(vm_call_method);

    return vm_call_method_missing(ec, reg_cfp, calling);

return vm_call_method_nome(ec, reg_cfp, calling);

i = calling->argc - 1;

if (calling->argc == 0) {

return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);

RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);

int flags = VM_CALL_FCALL;

CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
if (UNLIKELY(argv_ary = calling->heap_argv)) {
    flags |= VM_CALL_ARGS_SPLAT;
    if (calling->kw_splat) {
        VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
        ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
        calling->kw_splat = 0;

    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);

if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
return vm_call_opt_send0(ec, reg_cfp, calling, flags);

RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);

RB_DEBUG_COUNTER_INC(ccf_opt_send);

int flags = vm_ci_flag(ci);

if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
             ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
              (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
              ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
    return vm_call_opt_send_complex(ec, reg_cfp, calling);

CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
return vm_call_opt_send_simple(ec, reg_cfp, calling);
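/*
 * opt_send dispatch: calls that forward arguments or use a splat,
 * kw-splat, or keyword shape that needs re-splitting take
 * vm_call_opt_send_complex; the common simple shape takes
 * vm_call_opt_send_simple, which just adds VM_CALL_FCALL and
 * redispatches under the receiver-supplied method name.
 */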
const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);

    argv[0] = ID2SYM(vm_ci_mid(orig_ci));

    ec->method_missing_reason = reason;

    .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),

    if (!(flag & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;

        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);

return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));

return vm_call_method_nome(ec, cfp, calling);

if (cme->def->type == VM_METHOD_TYPE_REFINED &&
    cme->def->body.refined.orig_me) {
    cme = refined_method_callable_without_refinement(cme);

calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

return vm_call_method_each_type(ec, cfp, calling);
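/*
 * method_missing rewriting: the original method name is pushed as the
 * leading Symbol argument (argv[0] = ID2SYM(vm_ci_mid(orig_ci))), argc
 * is bumped, a fresh call info targeting idMethodMissing is built on
 * the stack, and dispatch restarts through vm_call_method.
 */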
find_refinement(VALUE refinements, VALUE klass)
{
    if (NIL_P(refinements)) {

if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
    const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

    cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
    if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {

} while (cfp->iseq != local_iseq);
if (orig_me->defined_class == 0) {

VM_ASSERT(callable_method_entry_p(cme));

if (UNDEFINED_METHOD_ENTRY_P(cme)) {

ID mid = vm_ci_mid(calling->cd->ci);
const rb_cref_t *cref = vm_get_cref(cfp->ep);

for (; cref; cref = CREF_NEXT(cref)) {
    const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
    if (NIL_P(refinement)) continue;

    rb_callable_method_entry(refinement, mid);

    if (vm_cc_call(cc) == vm_call_super_method) {

        if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {

    if (cme->def->type != VM_METHOD_TYPE_REFINED ||
        cme->def != ref_me->def) {

    if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {

if (vm_cc_cme(cc)->def->body.refined.orig_me) {
    return refined_method_callable_without_refinement(vm_cc_cme(cc));

if (calling->cd->cc) {
    const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);

    return vm_call_method(ec, cfp, calling);

struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
calling->cc = ref_cc;
return vm_call_method(ec, cfp, calling);
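/*
 * Refinement resolution: walk the cref chain, look up an activated
 * refinement of the method's owner via find_refinement(), and prefer
 * the refined entry unless it would re-resolve to the same definition
 * (the super case).  When a refined entry is found, a call cache of
 * cc_type_refinement is installed; otherwise dispatch falls back to
 * the unrefined original.
 */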
return vm_call_method_nome(ec, cfp, calling);

NOINLINE(static VALUE

int argc = calling->argc;

if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);

return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);

RB_DEBUG_COUNTER_INC(ccf_opt_call);

VALUE procval = calling->recv;
return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));

RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));

if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);

calling->recv = rb_vm_bh_to_procval(ec, block_handler);
calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
return vm_call_general(ec, reg_cfp, calling);
VALUE recv = calling->recv;

VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
return internal_RSTRUCT_GET(recv, off);

RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

VALUE ret = vm_call_opt_struct_aref0(ec, calling);

VALUE recv = calling->recv;

VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

rb_check_frozen(recv);

const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
internal_RSTRUCT_SET(recv, off, val);

RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \

        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \

switch (vm_cc_cme(cc)->def->body.optimized.type) {
  case OPTIMIZED_METHOD_TYPE_SEND:
    CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
    return vm_call_opt_send(ec, cfp, calling);
  case OPTIMIZED_METHOD_TYPE_CALL:
    CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
    return vm_call_opt_call(ec, cfp, calling);
  case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
    CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
    return vm_call_opt_block_call(ec, cfp, calling);
  case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
    CALLER_SETUP_ARG(cfp, calling, ci, 0);

    VM_CALL_METHOD_ATTR(v,
                        vm_call_opt_struct_aref(ec, cfp, calling),
                        set_vm_cc_ivar(cc); \
                        CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))

  case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
    CALLER_SETUP_ARG(cfp, calling, ci, 1);

    VM_CALL_METHOD_ATTR(v,
                        vm_call_opt_struct_aset(ec, cfp, calling),
                        set_vm_cc_ivar(cc); \
                        CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))

    rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

switch (cme->def->type) {
  case VM_METHOD_TYPE_ISEQ:
    if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
        CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
        return vm_call_iseq_fwd_setup(ec, cfp, calling);

    CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
    return vm_call_iseq_setup(ec, cfp, calling);

  case VM_METHOD_TYPE_NOTIMPLEMENTED:
  case VM_METHOD_TYPE_CFUNC:
    CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
    return vm_call_cfunc(ec, cfp, calling);

  case VM_METHOD_TYPE_ATTRSET:
    CALLER_SETUP_ARG(cfp, calling, ci, 1);

    const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);

    if (vm_cc_markable(cc)) {
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                            CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));

        VM_CALLCACHE_UNMARKABLE |
        VM_CALLCACHE_ON_STACK,

        .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,

        VM_CALL_METHOD_ATTR(v,
                            vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                            CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));

  case VM_METHOD_TYPE_IVAR:
    CALLER_SETUP_ARG(cfp, calling, ci, 0);

    vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
    const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
    VM_CALL_METHOD_ATTR(v,
                        vm_call_ivar(ec, cfp, calling),
                        CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));

  case VM_METHOD_TYPE_MISSING:
    vm_cc_method_missing_reason_set(cc, 0);
    CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
    return vm_call_method_missing(ec, cfp, calling);

  case VM_METHOD_TYPE_BMETHOD:
    CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
    return vm_call_bmethod(ec, cfp, calling);

  case VM_METHOD_TYPE_ALIAS:
    CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
    return vm_call_alias(ec, cfp, calling);

  case VM_METHOD_TYPE_OPTIMIZED:
    return vm_call_optimized(ec, cfp, calling, ci, cc);

  case VM_METHOD_TYPE_UNDEF:

  case VM_METHOD_TYPE_ZSUPER:
    return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

  case VM_METHOD_TYPE_REFINED:

    return vm_call_refined(ec, cfp, calling);

rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
const int stat = ci_missing_reason(ci);

if (vm_ci_mid(ci) == idMethodMissing) {
    if (UNLIKELY(calling->heap_argv)) {

        VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
        vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);

return vm_call_method_missing_body(ec, cfp, calling, ci, stat);

VALUE defined_class = me->defined_class;
VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
return NIL_P(refined_class) ? defined_class : refined_class;

VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

if (vm_cc_cme(cc) != NULL) {
    switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
      case METHOD_VISI_PUBLIC:
        return vm_call_method_each_type(ec, cfp, calling);

      case METHOD_VISI_PRIVATE:
        if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
            enum method_missing_reason stat = MISSING_PRIVATE;
            if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

            vm_cc_method_missing_reason_set(cc, stat);
            CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
            return vm_call_method_missing(ec, cfp, calling);

        return vm_call_method_each_type(ec, cfp, calling);

      case METHOD_VISI_PROTECTED:
        if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
            VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));

            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            return vm_call_method_missing(ec, cfp, calling);

            VM_ASSERT(vm_cc_cme(cc) != NULL);

            calling->cc = &cc_on_stack;
            return vm_call_method_each_type(ec, cfp, calling);

        return vm_call_method_each_type(ec, cfp, calling);

return vm_call_method_nome(ec, cfp, calling);

RB_DEBUG_COUNTER_INC(ccf_general);
return vm_call_method(ec, reg_cfp, calling);

VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
VM_ASSERT(cc != vm_cc_empty());

*(vm_call_handler *)&cc->call_ = vm_call_general;

RB_DEBUG_COUNTER_INC(ccf_super_method);

if (ec == NULL) rb_bug("unreachable");

VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
return vm_call_method(ec, reg_cfp, calling);
vm_search_normal_superclass(VALUE klass)
{

    klass = RBASIC(klass)->klass;

    klass = RCLASS_ORIGIN(klass);

NORETURN(static void vm_super_outside(void));

vm_super_outside(void)

empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
VALUE current_defined_class;

current_defined_class = vm_defined_class_for_protected_call(me);

    reg_cfp->iseq != method_entry_iseqptr(me) &&

    RCLASS_INCLUDER(current_defined_class) : current_defined_class;

    "self has wrong type to call super in this context: "
    "%"PRIsVALUE" (expected %"PRIsVALUE")",

if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
    "implicit argument passing of super from method defined"
    " by define_method() is not supported."
    " Specify all arguments explicitly.");

ID mid = me->def->original_id;

if (!vm_ci_markable(cd->ci)) {
    VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);

    cd->ci = vm_ci_new_runtime(mid,
                               vm_ci_kwarg(cd->ci));

VALUE klass = vm_search_normal_superclass(me->defined_class);

    cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);

cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);

if (cached_cme == NULL) {
    cd->cc = empty_cc_for_super();

else if (cached_cme->called_id != mid) {

    cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);

    cd->cc = cc = empty_cc_for_super();

switch (cached_cme->def->type) {
  case VM_METHOD_TYPE_REFINED:
  case VM_METHOD_TYPE_ATTRSET:
  case VM_METHOD_TYPE_IVAR:
    vm_cc_call_set(cc, vm_call_super_method);

VM_ASSERT((vm_cc_cme(cc), true));
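/*
 * Super dispatch: the search starts from the superclass of the frame
 * method's defined class (vm_search_normal_superclass).  For
 * definition types whose resolution can change underneath the cache
 * (refined methods, attr readers/writers), the cache keeps routing
 * through vm_call_super_method instead of committing a fastpath.
 */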
block_proc_is_lambda(const VALUE procval)
{

    GetProcPtr(procval, proc);
    return proc->is_lambda;

VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,

int is_lambda = FALSE;
VALUE val, arg, blockarg;

const struct vm_ifunc *ifunc = captured->code.ifunc;

else if (argc == 0) {

blockarg = rb_vm_bh_to_procval(ec, block_handler);

frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);

    frame_flag |= VM_FRAME_FLAG_CFRAME_KW;

vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
              VM_GUARDED_PREV_EP(captured->ep),
              0, ec->cfp->sp, 0, 0);
val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
rb_vm_pop_frame(ec);

return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);

return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);

for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {

vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];

    VM_ASSERT(argv[0] == arg0);

if (rb_simple_iseq_p(iseq)) {

    CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

    if (arg_setup_type == arg_setup_block &&
        calling->argc == 1 &&
        ISEQ_BODY(iseq)->param.flags.has_lead &&
        !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
        !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
        calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);

    if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
        if (arg_setup_type == arg_setup_block) {
            if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {

                CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                calling->argc = ISEQ_BODY(iseq)->param.lead_num;

            else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                calling->argc = ISEQ_BODY(iseq)->param.lead_num;

            argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);

return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);

calling = &calling_entry;
calling->argc = argc;
calling->block_handler = block_handler;
calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;

calling->heap_argv = 0;
struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
bool is_lambda, VALUE block_handler)
{
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE *const rsp = GET_SP() - calling->argc;
    VALUE *const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);

        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0

            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;

            if (calling->argc < 1) {

            calling->recv = TOPN(--calling->argc);

            if (calling->kw_splat) {
                flags |= VM_CALL_KW_SPLAT;

    if (calling->argc < 1) {

    calling->recv = TOPN(--calling->argc);

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);

MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{

    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);

    VM_UNREACHABLE(vm_yield_with_proc);

bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);

bool is_lambda, VALUE block_handler)
{
    bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
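/*
 * Block invocation dispatch: proc handlers are unwrapped to their
 * underlying handler first (inheriting lambda-ness), then the handler
 * type selects one of vm_invoke_{iseq,ifunc,proc,symbol}_block.
 */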
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{

    rb_bug("vm_make_proc_with_iseq: unreachable");

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);

vm_once_exec(VALUE iseq)

vm_once_clear(VALUE data)
{
    is->once.running_thread = NULL;

args[0] = obj; args[1] = Qfalse;

if (!UNDEF_P(r) && RTEST(r)) {
enum defined_type type = (enum defined_type)op_type;

    return rb_gvar_defined(SYM2ID(obj));

  case DEFINED_CVAR: {
    const rb_cref_t *cref = vm_get_cref(GET_EP());
    klass = vm_get_cvar_base(cref, GET_CFP(), 0);

  case DEFINED_CONST_FROM: {
    bool allow_nil = type == DEFINED_CONST;

    return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);

    return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);

  case DEFINED_METHOD: {

    switch (METHOD_ENTRY_VISI(me)) {
      case METHOD_VISI_PRIVATE:

      case METHOD_VISI_PROTECTED:

      case METHOD_VISI_PUBLIC:

        rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));

    return check_respond_to_missing(obj, v);

    if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {

  case DEFINED_ZSUPER:

    VALUE klass = vm_search_normal_superclass(me->defined_class);
    if (!klass) return false;

    ID id = me->def->original_id;

    return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));

    rb_bug("unimplemented defined? type (VM)");

return vm_defined(ec, reg_cfp, op_type, obj, v);
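/*
 * defined?(expr) support: each defined_type case answers without
 * raising, e.g. DEFINED_CVAR resolves the cvar base from the cref,
 * DEFINED_METHOD respects visibility, the yield case checks for a
 * block handler, and DEFINED_ZSUPER probes the superclass chain for
 * the current method's original id.
 */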
static const VALUE *
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
{
    const VALUE *ep = reg_ep;
    for (i = 0; i < lv; i++) {
        ep = GET_PREV_EP(ep);

vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);

        rb_bug("putspecialobject insn: unknown value_type %d", type);
const VALUE ary2 = ary2st;
VALUE tmp1 = rb_check_to_array(ary1);
VALUE tmp2 = rb_check_to_array(ary2);

const VALUE ary2 = ary2st;
VALUE tmp2 = rb_check_to_array(ary2);

return vm_concat_array(ary1, ary2st);

rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_to_array(ary1, ary2st);

VALUE tmp = rb_check_to_array(ary);

else if (RTEST(flag)) {

return vm_splat_array(flag, ary);
enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

if (flag & VM_CHECKMATCH_ARRAY) {

    for (i = 0; i < n; i++) {

        VALUE c = check_match(ec, v, target, type);

return check_match(ec, pattern, target, type);

return vm_check_match(ec, target, pattern, flag);
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
    if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))

if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
    RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
    RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
    RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

    RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);

    RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);

    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{

else if (VM_DEFINECLASS_SCOPED_P(flags)) {
    return rb_public_const_get_at(cbase, id);

vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{

else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
    "superclass mismatch for class %"PRIsVALUE"",

vm_check_if_module(ID id, VALUE mod)

vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;

vm_declare_module(ID id, VALUE cbase)

NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));

VALUE location = rb_const_source_location_at(cbase, id);
if (!NIL_P(location)) {
    rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                " previous definition of %"PRIsVALUE" was here",

vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",

    vm_check_if_namespace(cbase);

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);

    return vm_declare_class(id, flags, cbase, super);

vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);

    return vm_declare_module(id, cbase);

vm_find_or_create_class_by_id(ID id,

    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

      case VM_DEFINECLASS_TYPE_CLASS:
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:

      case VM_DEFINECLASS_TYPE_MODULE:
        return vm_define_module(id, flags, cbase);

        rb_bug("unknown defineclass type: %d", (int)type);
static rb_method_visibility_t

if (!vm_env_cref_by_cref(cfp->ep)) {
    return METHOD_VISI_PUBLIC;

return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;

if (!vm_env_cref_by_cref(cfp->ep)) {

return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;

rb_method_visibility_t visi;

    visi = METHOD_VISI_PUBLIC;

    klass = CREF_CLASS_FOR_DEFINITION(cref);
    visi = vm_scope_visibility_get(ec);

rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);

RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);

if (!is_singleton && vm_scope_module_func_check(ec)) {
    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);

VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

if (block_handler == VM_BLOCK_HANDLER_NONE) {
    rb_vm_localjump_error("no block given (yield)", Qnil, 0);

return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
enum method_explorer_type {
    mexp_search_invokeblock,

VALUE block_handler,
enum method_explorer_type method_explorer

int argc = vm_ci_argc(ci);
VALUE recv = TOPN(argc);
    .block_handler = block_handler,
    .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,

switch (method_explorer) {
  case mexp_search_method:
    calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
    val = vm_cc_call(cc)(ec, GET_CFP(), &calling);

  case mexp_search_super:
    calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
    val = vm_cc_call(cc)(ec, GET_CFP(), &calling);

  case mexp_search_invokeblock:
    val = vm_invokeblock_i(ec, GET_CFP(), &calling);
if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
    bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);

    val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);

    if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {

    bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
    val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);

VALUE bh = VM_BLOCK_HANDLER_NONE;
VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);

if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
    bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);

    val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);

    if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {

    bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
    val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);

VALUE bh = VM_BLOCK_HANDLER_NONE;
VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
if (check_method_basic_definition(vm_cc_cme(cc))) {

if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {

    val = rb_mod_to_s(recv);

if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
    return rb_nil_to_s(recv);

if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
    return rb_true_to_s(recv);

if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
    return rb_false_to_s(recv);

if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
    return rb_fix_to_s(recv);
vm_opt_ary_freeze(VALUE ary, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {

vm_opt_hash_freeze(VALUE hash, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {

vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {

#define id_cmp idCmp

if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {

    VALUE args[1] = {target};

    RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));

    return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
return vm_opt_duparray_include_p(ec, ary, target);

if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {

    rb_snum_t i = num - 1;

        if (OPTIMIZED_CMP(v, result) > 0) {

return vm_opt_newarray_max(ec, num, ptr);

if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {

    rb_snum_t i = num - 1;

        if (OPTIMIZED_CMP(v, result) < 0) {

return vm_opt_newarray_min(ec, num, ptr);

if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
    return rb_ary_hash_values(num, ptr);

return vm_opt_newarray_hash(ec, num, ptr);

if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {

    VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);

    VALUE args[1] = {target};

return vm_opt_newarray_include_p(ec, num, ptr, target);

if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {

    VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
    return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));

if (!UNDEF_P(buffer)) {
    args[1] = rb_hash_new_with_size(1);

return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);

return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);

return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
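/*
 * The vm_opt_newarray_* helpers above implement the newarray_send
 * optimization: when Array#max/#min/#hash/#include?/#pack are
 * unredefined, the operands are consumed directly from the VM stack
 * (or wrapped in a stack-allocated fake array), avoiding a real Array
 * allocation for expressions like `[a, b].max`.
 */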
vm_track_constant_cache(ID id, void *ic)
{
    struct rb_id_table *const_cache = vm->constant_cache;
    VALUE lookup_result;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {

        ics = st_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);

    vm->inserting_constant_cache_id = id;

    st_insert(ics, (st_data_t) ic, (st_data_t)Qtrue);

    vm->inserting_constant_cache_id = (ID)0;

for (int i = 0; segments[i]; i++) {
    ID id = segments[i];
    if (id == idNULL) continue;
    vm_track_constant_cache(id, ic);
if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
    VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

return (ic_cref == NULL ||
        ic_cref == vm_get_cref(reg_ep));

VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);

rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);

if (ruby_vm_const_missing_count > 0) {
    ruby_vm_const_missing_count = 0;

ice->ic_cref = vm_get_const_key_cref(reg_ep);

unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
rb_yjit_constant_ic_update(iseq, ic, pos);
rb_rjit_constant_ic_update(iseq, ic, pos);

if (ice && vm_ic_hit_p(ice, GET_EP())) {

    VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));

ruby_vm_constant_cache_misses++;
val = vm_get_ev_const_chain(ec, segments);
vm_ic_track_const_chain(GET_CFP(), ic, segments);

vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);

if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
    return is->once.value;

else if (is->once.running_thread == NULL) {

    is->once.running_thread = th;

    is->once.running_thread = RUNNING_THREAD_ONCE_DONE;

else if (is->once.running_thread == th) {

    return vm_once_exec((VALUE)iseq);
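/*
 * The once-iseq state machine visible above: RUNNING_THREAD_ONCE_DONE
 * means the cached value is ready; NULL means this thread claims the
 * slot and runs the body; seeing our own thread again means recursive
 * re-entry, which simply re-executes the body.
 */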
RUBY_VM_CHECK_INTS(ec);

vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {

    if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                               SYMBOL_REDEFINED_OP_FLAG |
                               INTEGER_REDEFINED_OP_FLAG |
                               FLOAT_REDEFINED_OP_FLAG |
                               NIL_REDEFINED_OP_FLAG |
                               TRUE_REDEFINED_OP_FLAG |
                               FALSE_REDEFINED_OP_FLAG |
                               STRING_REDEFINED_OP_FLAG)) {

        if (!isinf(kval) && modf(kval, &kval) == 0.0) {

        if (rb_hash_stlike_lookup(hash, key, &val)) {
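/*
 * case/when dispatch via CDHASH only fires when #=== is unredefined
 * for every literal key type the hash may contain; finite Float keys
 * with no fractional part (the modf check) are folded to their
 * integral value before the lookup.
 */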
NORETURN(static void

const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
static const char stack_consistency_error[] =
    "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL

rb_bug(stack_consistency_error, nsp, nbp);
if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
    return rb_fix_plus_fix(recv, obj);
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
    return rb_str_opt_plus(recv, obj);

    BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
    return rb_fix_minus_fix(recv, obj);
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
    return rb_fix_mul_fix(recv, obj);
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
    return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
    return rb_flo_div_flo(recv, obj);

    BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
    return rb_flo_div_flo(recv, obj);

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
    return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {

if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
    VALUE val = opt_equality(iseq, recv, obj, cd_eq);

    if (!UNDEF_P(val)) {
        return RBOOL(!RTEST(val));

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
else if (FLONUM_2_P(recv, obj) &&
         BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {

if (FIXNUM_2_P(recv, obj) &&
    BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
    return rb_fix_aref(recv, obj);

    BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {

        return rb_ary_entry_internal(recv, FIX2LONG(obj));

    return rb_ary_aref1(recv, obj);

    BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&

    BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
    rb_hash_compare_by_id_p(recv) == Qfalse &&
    !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
return vm_opt_aref_with(recv, key);

    BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
    rb_hash_compare_by_id_p(recv) == Qfalse) {

vm_opt_length(VALUE recv, int bop)
{

    BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
    if (bop == BOP_EMPTY_P) {

    BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {

vm_opt_empty_p(VALUE recv)
{
    switch (vm_opt_length(recv, BOP_EMPTY_P)) {

    BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {

else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {

  case RSHIFT(~0UL, 1):
    return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));

vm_opt_succ(VALUE recv)
{
    BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
    return fix_succ(recv);

    BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {

if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
    return RBOOL(!RTEST(recv));

    BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {

    BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
VALUE self = GET_SELF();

VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

if (event & global_hooks->events) {

    vm_dtrace(event, ec);
    rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0, val, 0);

if (local_hooks != NULL) {
    if (event & local_hooks->events) {

        rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0, val, 0);

#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \

VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
return cfp->ep[VM_ENV_INDEX_LAST_LVAR];

const VALUE *pc = reg_cfp->pc;
rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;

if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {

size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;

rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;

const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
enabled_flags |= iseq_local_events;

VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);

if (bmethod_frame) {
    VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
    bmethod_local_hooks = me->def->body.bmethod.hooks;
    bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
    if (bmethod_local_hooks) {
        bmethod_local_events = bmethod_local_hooks->events;

if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {

    rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);

else if (ec->trace_arg != NULL) {

rb_event_flag_t bmethod_events = global_events | bmethod_local_events;

ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
                  (int)rb_iseq_line_no(iseq, pos),

VM_ASSERT(reg_cfp->pc == pc);
VM_ASSERT(pc_events != 0);

VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,   Qundef);
VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);

#undef VM_TRACE_HOOK
#if VM_CHECK_MODE > 0
NORETURN( NOINLINE( COLDFUNC
void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

Init_vm_stack_canary(void)
{
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01;

    vm_stack_canary_was_born = true;

rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{

    const char *insn = rb_insns_name(i);

    rb_bug("dead canary found at %s: %s", insn, str);

void Init_vm_stack_canary(void) { }
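/*
 * Stack canary: a random word with the low bit forced on (so it is
 * never zero) is used to watch for stack clobbering in VM_CHECK_MODE
 * builds; finding it dead aborts with rb_bug(), naming the instruction
 * that overwrote it.
 */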
return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);

return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);

return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);

return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);

return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);

return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);

return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7364 static builtin_invoker
7365 lookup_builtin_invoker(
int argc)
7367 static const builtin_invoker invokers[] = {
7386 return invokers[argc];
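/* Illustrative sketch (not part of the original file): why dispatch goes
 * through an arity-indexed table rather than varargs. Each arity keeps a
 * correctly-typed function pointer, so every call site is a plain indirect
 * call with a fixed signature. Handlers below are hypothetical:
 *
 *   #include <assert.h>
 *
 *   typedef int (*handler_t)(const int *argv);
 *   static int h0(const int *argv) { (void)argv; return 0; }
 *   static int h1(const int *argv) { return argv[0]; }
 *   static int h2(const int *argv) { return argv[0] + argv[1]; }
 *
 *   int main(void)
 *   {
 *       static const handler_t table[] = { h0, h1, h2 };
 *       int args[] = { 20, 22 };
 *       assert(table[2](args) == 42); // index by arity, like invokers[argc]
 *       return 0;
 *   }
 */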
static inline VALUE
invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function *bf, const VALUE *argv)
{
    /* verify the leaf-builtin assumption with a stack canary */
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;
}

static VALUE
vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, const VALUE *argv)
{
    return invoke_bf(ec, cfp, bf, argv);
}

static VALUE
vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
{
    if (0) { /* debug print */
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i=0; i<bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
                          (void *)(uintptr_t)bf->func_ptr);
    }

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    }
    else {
        const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
        return invoke_bf(ec, cfp, bf, argv);
    }
}

/* for __builtin_inline!() */
VALUE
rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
{
    const rb_control_frame_t *cfp = ec->cfp;
    return cfp->ep[index];
}
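/* Illustrative sketch (not part of the original file): the ep-relative
 * address arithmetic used by vm_invoke_builtin_delegate() above. Locals sit
 * below the environment pointer, so the first local is found at
 * ep - local_table_size - VM_ENV_DATA_SIZE + 1. Sizes below are hypothetical:
 *
 *   #include <assert.h>
 *
 *   #define LOCAL_TABLE_SIZE 3
 *   #define ENV_DATA_SIZE    3   // stand-in for VM_ENV_DATA_SIZE
 *
 *   int main(void)
 *   {
 *       long stack[LOCAL_TABLE_SIZE + ENV_DATA_SIZE] = { 10, 11, 12 };
 *       const long *ep = &stack[LOCAL_TABLE_SIZE + ENV_DATA_SIZE - 1];
 *       const long *argv = ep - LOCAL_TABLE_SIZE - ENV_DATA_SIZE + 1;
 *       assert(argv == &stack[0] && argv[0] == 10); // first local recovered
 *       return 0;
 *   }
 */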