11#include "ruby/internal/config.h"
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
20#include "debug_counter.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/struct.h"
34#include "insns_info.inc"
40 int argc,
const VALUE *argv,
int priv);
50ruby_vm_special_exception_copy(
VALUE exc)
53 rb_obj_copy_ivar(e, exc);
61 VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
62 ec->raised_flag = RAISED_STACKOVERFLOW;
64 VALUE at = rb_ec_backtrace_object(ec);
65 mesg = ruby_vm_special_exception_copy(mesg);
70 EC_JUMP_TAG(ec, TAG_RAISE);
73NORETURN(
static void vm_stackoverflow(
void));
78 ec_stack_overflow(GET_EC(), TRUE);
86 rb_bug(
"system stack overflow during GC. Faulty native extension?");
89 ec->raised_flag = RAISED_STACKOVERFLOW;
90 ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
91 EC_JUMP_TAG(ec, TAG_RAISE);
94 ec_stack_overflow(ec, TRUE);
96 ec_stack_overflow(ec, FALSE);
104callable_class_p(
VALUE klass)
106#if VM_CHECK_MODE >= 2
107 if (!klass)
return FALSE;
135 VM_ASSERT(IMEMO_TYPE_P((
VALUE)cme, imemo_ment));
137 if (callable_class_p(cme->defined_class)) {
147vm_check_frame_detail(
VALUE type,
int req_block,
int req_me,
int req_cref,
VALUE specval,
VALUE cref_or_me,
int is_cframe,
const rb_iseq_t *iseq)
149 unsigned int magic = (
unsigned int)(
type & VM_FRAME_MAGIC_MASK);
150 enum imemo_type cref_or_me_type = imemo_env;
153 cref_or_me_type = imemo_type(cref_or_me);
155 if (
type & VM_FRAME_FLAG_BMETHOD) {
159 if (req_block && (
type & VM_ENV_FLAG_LOCAL) == 0) {
160 rb_bug(
"vm_push_frame: specval (%p) should be a block_ptr on %x frame", (
void *)specval, magic);
162 if (!req_block && (
type & VM_ENV_FLAG_LOCAL) != 0) {
163 rb_bug(
"vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (
void *)specval, magic);
167 if (cref_or_me_type != imemo_ment) {
168 rb_bug(
"vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
172 if (req_cref && cref_or_me_type != imemo_cref) {
173 rb_bug(
"vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
176 if (cref_or_me !=
Qfalse && cref_or_me_type != imemo_cref) {
177 if (((
type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
181 rb_bug(
"vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
187 if (cref_or_me_type == imemo_ment) {
190 if (!callable_method_entry_p(me)) {
191 rb_bug(
"vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
195 if ((
type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
196 VM_ASSERT(iseq == NULL ||
198 RUBY_VM_NORMAL_ISEQ_P(iseq)
202 VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
212 VALUE given_magic =
type & VM_FRAME_MAGIC_MASK;
215#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
217 vm_check_frame_detail(type, req_block, req_me, req_cref, \
218 specval, cref_or_me, is_cframe, iseq); \
220 switch (given_magic) {
222 CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
223 CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
224 CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
225 CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
226 CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
227 CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
228 CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
229 CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
230 CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
232 rb_bug(
"vm_push_frame: unknown type (%x)", (
unsigned int)given_magic);
237static VALUE vm_stack_canary;
238static bool vm_stack_canary_was_born =
false;
245 unsigned int pos = 0;
246 while (pos < ISEQ_BODY(iseq)->iseq_size) {
247 int opcode = rb_vm_insn_addr2opcode((
void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
248 unsigned int next_pos = pos + insn_len(opcode);
249 if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
254 rb_bug(
"failed to find the previous insn");
263 if (! LIKELY(vm_stack_canary_was_born)) {
266 else if ((
VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
270 else if (! (iseq = GET_ISEQ())) {
273 else if (LIKELY(sp[0] != vm_stack_canary)) {
282 const VALUE *orig = rb_iseq_original_iseq(iseq);
283 const VALUE iseqw = rb_iseqw_new(iseq);
285 const char *stri = rb_str_to_cstr(inspection);
286 const VALUE disasm = rb_iseq_disasm(iseq);
287 const char *strd = rb_str_to_cstr(disasm);
288 const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
289 const enum ruby_vminsn_type insn = (
enum ruby_vminsn_type)orig[pos];
290 const char *name = insn_name(insn);
296 "We are killing the stack canary set by %s, "
297 "at %s@pc=%"PRIdPTR
"\n"
298 "watch out the C stack trace.\n"
300 name, stri, pos, strd);
301 rb_bug(
"see above.");
303#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
306#define vm_check_canary(ec, sp)
307#define vm_check_frame(a, b, c, d)
312vm_push_frame_debug_counter_inc(
319 RB_DEBUG_COUNTER_INC(frame_push);
321 if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
322 const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
323 const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
326 RB_DEBUG_COUNTER_INC(frame_R2R);
329 RB_DEBUG_COUNTER_INC(frame_R2C);
334 RB_DEBUG_COUNTER_INC(frame_C2R);
337 RB_DEBUG_COUNTER_INC(frame_C2C);
342 switch (
type & VM_FRAME_MAGIC_MASK) {
343 case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method);
return;
344 case VM_FRAME_MAGIC_BLOCK: RB_DEBUG_COUNTER_INC(frame_push_block);
return;
345 case VM_FRAME_MAGIC_CLASS: RB_DEBUG_COUNTER_INC(frame_push_class);
return;
346 case VM_FRAME_MAGIC_TOP: RB_DEBUG_COUNTER_INC(frame_push_top);
return;
347 case VM_FRAME_MAGIC_CFUNC: RB_DEBUG_COUNTER_INC(frame_push_cfunc);
return;
348 case VM_FRAME_MAGIC_IFUNC: RB_DEBUG_COUNTER_INC(frame_push_ifunc);
return;
349 case VM_FRAME_MAGIC_EVAL: RB_DEBUG_COUNTER_INC(frame_push_eval);
return;
350 case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue);
return;
351 case VM_FRAME_MAGIC_DUMMY: RB_DEBUG_COUNTER_INC(frame_push_dummy);
return;
354 rb_bug(
"unreachable");
357#define vm_push_frame_debug_counter_inc(ec, cfp, t)
362rb_vm_stack_canary(
void)
365 return vm_stack_canary;
371STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
372STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
373STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);
389 vm_check_frame(
type, specval, cref_or_me, iseq);
390 VM_ASSERT(local_size >= 0);
393 CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
394 vm_check_canary(ec, sp);
399 for (
int i=0; i < local_size; i++) {
426 #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
427 atomic_signal_fence(memory_order_seq_cst);
435 vm_push_frame_debug_counter_inc(ec, cfp,
type);
443 if (VMDEBUG == 2) SDR();
445 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
452 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
454 if (VMDEBUG == 2) SDR();
456 RUBY_VM_CHECK_INTS(ec);
457 ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
459 return flags & VM_FRAME_FLAG_FINISH;
465 vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
473 rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
477 VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
479 VM_BLOCK_HANDLER_NONE,
486 return (
VALUE)dmy_iseq;
491rb_arity_error_new(
int argc,
int min,
int max)
493 VALUE err_mess = rb_sprintf(
"wrong number of arguments (given %d, expected %d", argc, min);
501 rb_str_catf(err_mess,
"..%d", max);
508rb_error_arity(
int argc,
int min,
int max)
515NOINLINE(
static void vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v));
518vm_env_write_slowpath(
const VALUE *ep,
int index,
VALUE v)
521 rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
522 VM_FORCE_WRITE(&ep[index], v);
523 VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
524 RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
529vm_env_write(
const VALUE *ep,
int index,
VALUE v)
531 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
532 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
533 VM_STACK_ENV_WRITE(ep, index, v);
536 vm_env_write_slowpath(ep, index, v);
541rb_vm_env_write(
const VALUE *ep,
int index,
VALUE v)
543 vm_env_write(ep, index, v);
549 if (block_handler == VM_BLOCK_HANDLER_NONE) {
553 switch (vm_block_handler_type(block_handler)) {
554 case block_handler_type_iseq:
555 case block_handler_type_ifunc:
556 return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler),
rb_cProc);
557 case block_handler_type_symbol:
558 return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
559 case block_handler_type_proc:
560 return VM_BH_TO_PROC(block_handler);
562 VM_UNREACHABLE(rb_vm_bh_to_procval);
571vm_svar_valid_p(
VALUE svar)
574 switch (imemo_type(svar)) {
583 rb_bug(
"vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
593 if (lep && (ec == NULL || ec->root_lep != lep)) {
594 svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
597 svar = ec->root_svar;
600 VM_ASSERT(svar ==
Qfalse || vm_svar_valid_p(svar));
608 VM_ASSERT(vm_svar_valid_p((
VALUE)svar));
610 if (lep && (ec == NULL || ec->root_lep != lep)) {
611 vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (
VALUE)svar);
614 RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
621 const struct vm_svar *svar = lep_svar(ec, lep);
626 case VM_SVAR_LASTLINE:
627 return svar->lastline;
628 case VM_SVAR_BACKREF:
629 return svar->backref;
631 const VALUE ary = svar->others;
637 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
646 struct vm_svar *svar = IMEMO_NEW(
struct vm_svar, imemo_svar, obj);
657 struct vm_svar *svar = lep_svar(ec, lep);
660 lep_svar_write(ec, lep, svar = svar_new((
VALUE)svar));
664 case VM_SVAR_LASTLINE:
667 case VM_SVAR_BACKREF:
671 VALUE ary = svar->others;
676 rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
687 val = lep_svar_get(ec, lep, key);
690 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
707 rb_bug(
"unexpected back-ref");
720 VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
730 return rb_reg_last_defined(backref);
732 rb_bug(
"unexpected back-ref");
736 nth = (int)(
type >> 1);
743check_method_entry(
VALUE obj,
int can_be_svar)
745 if (obj ==
Qfalse)
return NULL;
748 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_method_entry: unknown type: %s", rb_obj_info(obj));
751 switch (imemo_type(obj)) {
762 rb_bug(
"check_method_entry: svar should not be there:");
771 const VALUE *ep = cfp->ep;
774 while (!VM_ENV_LOCAL_P(ep)) {
775 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return me;
776 ep = VM_ENV_PREV_EP(ep);
779 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
785 switch (me->def->type) {
786 case VM_METHOD_TYPE_ISEQ:
787 return me->def->body.iseq.
iseqptr;
796 switch (me->def->type) {
797 case VM_METHOD_TYPE_ISEQ:
798 return me->def->body.iseq.
cref;
804#if VM_CHECK_MODE == 0
808check_cref(
VALUE obj,
int can_be_svar)
810 if (obj ==
Qfalse)
return NULL;
813 if (!
RB_TYPE_P(obj,
T_IMEMO)) rb_bug(
"check_cref: unknown type: %s", rb_obj_info(obj));
816 switch (imemo_type(obj)) {
827 rb_bug(
"check_method_entry: svar should not be there:");
834vm_env_cref(
const VALUE *ep)
838 while (!VM_ENV_LOCAL_P(ep)) {
839 if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL)
return cref;
840 ep = VM_ENV_PREV_EP(ep);
843 return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
847is_cref(
const VALUE v,
int can_be_svar)
850 switch (imemo_type(v)) {
863vm_env_cref_by_cref(
const VALUE *ep)
865 while (!VM_ENV_LOCAL_P(ep)) {
866 if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE))
return TRUE;
867 ep = VM_ENV_PREV_EP(ep);
869 return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
873cref_replace_with_duplicated_cref_each_frame(
const VALUE *vptr,
int can_be_svar,
VALUE parent)
875 const VALUE v = *vptr;
879 switch (imemo_type(v)) {
882 new_cref = vm_cref_dup(cref);
887 VM_FORCE_WRITE(vptr, (
VALUE)new_cref);
892 return cref_replace_with_duplicated_cref_each_frame(&((
struct vm_svar *)v)->
cref_or_me, FALSE, v);
896 rb_bug(
"cref_replace_with_duplicated_cref_each_frame: unreachable");
905vm_cref_replace_with_duplicated_cref(
const VALUE *ep)
907 if (vm_env_cref_by_cref(ep)) {
911 while (!VM_ENV_LOCAL_P(ep)) {
912 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
913 if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
916 ep = VM_ENV_PREV_EP(ep);
918 envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) :
Qfalse;
919 return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
922 rb_bug(
"vm_cref_dup: unreachable");
927vm_get_cref(
const VALUE *ep)
935 rb_bug(
"vm_get_cref: unreachable");
940rb_vm_get_cref(
const VALUE *ep)
942 return vm_get_cref(ep);
953 return vm_get_cref(cfp->ep);
957vm_get_const_key_cref(
const VALUE *ep)
963 if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
964 RCLASS_EXT(CREF_CLASS(cref))->cloned) {
967 cref = CREF_NEXT(cref);
980 if (CREF_CLASS(cref) == old_klass) {
981 new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
982 *new_cref_ptr = new_cref;
985 new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
986 cref = CREF_NEXT(cref);
987 *new_cref_ptr = new_cref;
988 new_cref_ptr = &new_cref->next;
990 *new_cref_ptr = NULL;
999 prev_cref = vm_env_cref(ep);
1005 prev_cref = vm_env_cref(cfp->ep);
1009 return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
1013vm_get_cbase(
const VALUE *ep)
1015 const rb_cref_t *cref = vm_get_cref(ep);
1017 return CREF_CLASS_FOR_DEFINITION(cref);
1021vm_get_const_base(
const VALUE *ep)
1023 const rb_cref_t *cref = vm_get_cref(ep);
1026 if (!CREF_PUSHED_BY_EVAL(cref)) {
1027 return CREF_CLASS_FOR_DEFINITION(cref);
1029 cref = CREF_NEXT(cref);
1036vm_check_if_namespace(
VALUE klass)
1039 rb_raise(
rb_eTypeError,
"%+"PRIsVALUE
" is not a class/module", klass);
1044vm_ensure_not_refinement_module(
VALUE self)
1047 rb_warn(
"not defined at the refinement, but at the outer class/module");
1063 if (
NIL_P(orig_klass) && allow_nil) {
1065 const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
1069 while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
1070 root_cref = CREF_NEXT(root_cref);
1073 while (cref && CREF_NEXT(cref)) {
1074 if (CREF_PUSHED_BY_EVAL(cref)) {
1078 klass = CREF_CLASS(cref);
1080 cref = CREF_NEXT(cref);
1082 if (!
NIL_P(klass)) {
1086 if ((ce = rb_const_lookup(klass,
id))) {
1087 rb_const_warn_if_deprecated(ce, klass,
id);
1090 if (am == klass)
break;
1092 if (is_defined)
return 1;
1093 if (rb_autoloading_value(klass,
id, &av, NULL))
return av;
1095 goto search_continue;
1102 if (UNLIKELY(!rb_ractor_main_p())) {
1104 rb_raise(rb_eRactorIsolationError,
1105 "can not access non-shareable objects in constant %"PRIsVALUE
"::%s by non-main ractor.",
rb_class_path(klass), rb_id2name(
id));
1116 if (root_cref && !
NIL_P(CREF_CLASS(root_cref))) {
1117 klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
1131 vm_check_if_namespace(orig_klass);
1133 return rb_public_const_defined_from(orig_klass,
id);
1136 return rb_public_const_get_from(orig_klass,
id);
1144 return vm_get_ev_const(ec, orig_klass,
id, allow_nil ==
Qtrue, 0);
1152 int allow_nil = TRUE;
1153 if (segments[0] == idNULL) {
1158 while (segments[idx]) {
1159 ID id = segments[idx++];
1160 val = vm_get_ev_const(ec, val,
id, allow_nil, 0);
1173 rb_bug(
"vm_get_cvar_base: no cref");
1176 while (CREF_NEXT(cref) &&
1177 (
NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
1178 CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
1179 cref = CREF_NEXT(cref);
1181 if (top_level_raise && !CREF_NEXT(cref)) {
1185 klass = vm_get_iclass(cfp, CREF_CLASS(cref));
1193ALWAYS_INLINE(
static void fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id));
1195fill_ivar_cache(
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
int is_attr, attr_index_t index, shape_id_t shape_id)
1198 vm_cc_attr_index_set(cc, index, shape_id);
1201 vm_ic_attr_index_set(iseq, ic, index, shape_id);
1205#define ractor_incidental_shareable_p(cond, val) \
1206 (!(cond) || rb_ractor_shareable_p(val))
1207#define ractor_object_incidental_shareable_p(obj, val) \
1208 ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
1210#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1218 shape_id_t shape_id;
1222 return default_value;
1225#if SHAPE_IN_BASIC_FLAGS
1226 shape_id = RBASIC_SHAPE_ID(obj);
1234#if !SHAPE_IN_BASIC_FLAGS
1235 shape_id = ROBJECT_SHAPE_ID(obj);
1241 if (UNLIKELY(!rb_ractor_main_p())) {
1249 if (default_value ==
Qundef) {
1257 ivar_list = RCLASS_IVPTR(obj);
1259#if !SHAPE_IN_BASIC_FLAGS
1260 shape_id = RCLASS_SHAPE_ID(obj);
1268 rb_gen_ivtbl_get(obj,
id, &ivtbl);
1269#if !SHAPE_IN_BASIC_FLAGS
1270 shape_id = ivtbl->shape_id;
1272 ivar_list = ivtbl->as.shape.ivptr;
1275 return default_value;
1279 shape_id_t cached_id;
1283 vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
1286 vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
1289 if (LIKELY(cached_id == shape_id)) {
1290 RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1292 if (index == ATTR_INDEX_NOT_SET) {
1293 return default_value;
1296 val = ivar_list[index];
1297#if USE_DEBUG_COUNTER
1298 RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
1301 RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
1307#if USE_DEBUG_COUNTER
1309 if (cached_id != INVALID_SHAPE_ID) {
1310 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
1313 RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
1317 if (cached_id != INVALID_SHAPE_ID) {
1318 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
1321 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
1324 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1327 RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
1331 if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
1336 table = (
st_table *)RCLASS_IVPTR(obj);
1340 table = ROBJECT_IV_HASH(obj);
1345 if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
1346 table = ivtbl->as.complex.table;
1352 if (!table || !st_lookup(table,
id, &val)) {
1353 val = default_value;
1357 shape_id_t previous_cached_id = cached_id;
1358 if (rb_shape_get_iv_index_with_hint(shape_id,
id, &index, &cached_id)) {
1361 if (cached_id != previous_cached_id) {
1362 fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
1365 if (index == ATTR_INDEX_NOT_SET) {
1366 val = default_value;
1370 val = ivar_list[index];
1376 vm_cc_attr_index_initialize(cc, shape_id);
1379 vm_ic_attr_index_initialize(ic, shape_id);
1382 val = default_value;
1388 if (!UNDEF_P(default_value)) {
1396 RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
1399 return rb_attr_get(obj,
id);
1407populate_cache(attr_index_t index, shape_id_t next_shape_id,
ID id,
const rb_iseq_t *iseq,
IVC ic,
const struct rb_callcache *cc,
bool is_attr)
1409 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1413 vm_cc_attr_index_set(cc, index, next_shape_id);
1416 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1428 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1431 rb_check_frozen(obj);
1433 attr_index_t index = rb_obj_ivar_set(obj,
id, val);
1435 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1437 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1438 populate_cache(index, next_shape_id,
id, iseq, ic, cc, is_attr);
1441 RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);
1451 return vm_setivar_slowpath(obj,
id, val, iseq, ic, NULL,
false);
1457 return vm_setivar_slowpath(obj,
id, val, NULL, NULL, cc,
true);
1460NOINLINE(
static VALUE vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index));
1462vm_setivar_default(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1464#if SHAPE_IN_BASIC_FLAGS
1465 shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
1467 shape_id_t shape_id = rb_generic_shape_id(obj);
1473 if (shape_id == dest_shape_id) {
1474 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1476 else if (dest_shape_id != INVALID_SHAPE_ID) {
1477 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1478 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1480 if (shape_id == dest_shape->parent_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1491 rb_gen_ivtbl_get(obj, 0, &ivtbl);
1493 if (shape_id != dest_shape_id) {
1494#if SHAPE_IN_BASIC_FLAGS
1495 RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
1497 ivtbl->shape_id = dest_shape_id;
1503 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1509vm_setivar(
VALUE obj,
ID id,
VALUE val, shape_id_t dest_shape_id, attr_index_t index)
1517 shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
1518 RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1520 if (LIKELY(shape_id == dest_shape_id)) {
1521 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1524 else if (dest_shape_id != INVALID_SHAPE_ID) {
1525 rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
1526 rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
1527 shape_id_t source_shape_id = dest_shape->parent_id;
1529 if (shape_id == source_shape_id && dest_shape->edge_name ==
id && shape->capacity == dest_shape->capacity) {
1530 RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
1532 ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
1534 RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id),
id) == dest_shape);
1550 RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
1551 RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);
1557 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
1569 VALUE defined_class = 0;
1573 defined_class =
RBASIC(defined_class)->klass;
1576 struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
1578 rb_bug(
"the cvc table should be set");
1582 if (!rb_id_table_lookup(rb_cvc_tbl,
id, &ent_data)) {
1583 rb_bug(
"should have cvar cache entry");
1588 ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
1604 cref = vm_get_cref(GET_EP());
1606 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1607 RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
1609 VALUE v = rb_ivar_lookup(ic->entry->class_value,
id,
Qundef);
1615 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1617 return update_classvariable_cache(iseq, klass,
id, cref, ic);
1623 return vm_getclassvariable(iseq, cfp,
id, ic);
1630 cref = vm_get_cref(GET_EP());
1632 if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
1633 RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
1635 rb_class_ivar_set(ic->entry->class_value,
id, val);
1639 VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
1643 update_classvariable_cache(iseq, klass,
id, cref, ic);
1649 vm_setclassvariable(iseq, cfp,
id, val, ic);
1655 return vm_getivar(obj,
id, iseq, ic, NULL, FALSE,
Qnil);
1666 shape_id_t dest_shape_id;
1668 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1670 if (UNLIKELY(UNDEF_P(vm_setivar(obj,
id, val, dest_shape_id, index)))) {
1677 if (!UNDEF_P(vm_setivar_default(obj,
id, val, dest_shape_id, index))) {
1681 vm_setivar_slowpath_ivar(obj,
id, val, iseq, ic);
1688 vm_setinstancevariable(iseq, obj,
id, val, ic);
1697 ec->tag->state = RUBY_TAG_FATAL;
1700 ec->tag->state = TAG_THROW;
1702 else if (THROW_DATA_P(err)) {
1703 ec->tag->state = THROW_DATA_STATE((
struct vm_throw_data *)err);
1706 ec->tag->state = TAG_RAISE;
1713 const int flag,
const VALUE throwobj)
1721 else if (state == TAG_BREAK) {
1723 const VALUE *ep = GET_EP();
1724 const rb_iseq_t *base_iseq = GET_ISEQ();
1725 escape_cfp = reg_cfp;
1727 while (ISEQ_BODY(base_iseq)->
type != ISEQ_TYPE_BLOCK) {
1728 if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1729 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1730 ep = escape_cfp->ep;
1731 base_iseq = escape_cfp->iseq;
1734 ep = VM_ENV_PREV_EP(ep);
1735 base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
1736 escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
1737 VM_ASSERT(escape_cfp->iseq == base_iseq);
1741 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1747 ep = VM_ENV_PREV_EP(ep);
1749 while (escape_cfp < eocfp) {
1750 if (escape_cfp->ep == ep) {
1751 const rb_iseq_t *
const iseq = escape_cfp->iseq;
1752 const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
1757 for (i=0; i < ct->size; i++) {
1759 UNALIGNED_MEMBER_PTR(ct, entries[i]);
1761 if (entry->type == CATCH_TYPE_BREAK &&
1762 entry->iseq == base_iseq &&
1763 entry->start < epc && entry->end >= epc) {
1764 if (entry->cont == epc) {
1773 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1778 rb_vm_localjump_error(
"break from proc-closure", throwobj, TAG_BREAK);
1781 else if (state == TAG_RETRY) {
1782 const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
1784 escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
1786 else if (state == TAG_RETURN) {
1787 const VALUE *current_ep = GET_EP();
1788 const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
1789 int in_class_frame = 0;
1791 escape_cfp = reg_cfp;
1794 while (!VM_ENV_LOCAL_P(ep)) {
1795 if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
1798 ep = VM_ENV_PREV_EP(ep);
1802 while (escape_cfp < eocfp) {
1803 const VALUE *lep = VM_CF_LEP(escape_cfp);
1809 if (lep == target_lep &&
1810 VM_FRAME_RUBYFRAME_P(escape_cfp) &&
1811 ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
1816 if (lep == target_lep) {
1817 if (VM_FRAME_LAMBDA_P(escape_cfp)) {
1819 if (in_class_frame) {
1824 const VALUE *tep = current_ep;
1826 while (target_lep != tep) {
1827 if (escape_cfp->ep == tep) {
1829 if (tep == target_ep) {
1833 goto unexpected_return;
1836 tep = VM_ENV_PREV_EP(tep);
1840 else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
1841 switch (ISEQ_BODY(escape_cfp->iseq)->type) {
1843 case ISEQ_TYPE_MAIN:
1845 if (in_class_frame)
goto unexpected_return;
1846 if (target_ep == NULL) {
1850 goto unexpected_return;
1854 case ISEQ_TYPE_EVAL: {
1856 enum rb_iseq_type t = ISEQ_BODY(is)->type;
1857 while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
1858 if (!(is = ISEQ_BODY(is)->parent_iseq))
break;
1859 t = ISEQ_BODY(is)->type;
1861 toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
1864 case ISEQ_TYPE_CLASS:
1873 if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
1874 if (target_ep == NULL) {
1878 goto unexpected_return;
1882 escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
1885 rb_vm_localjump_error(
"unexpected return", throwobj, TAG_RETURN);
1891 rb_bug(
"isns(throw): unsupported throw type");
1894 ec->tag->state = state;
1895 return (
VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
1900 rb_num_t throw_state,
VALUE throwobj)
1902 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1903 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1906 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1909 return vm_throw_continue(ec, throwobj);
1916 return vm_throw(ec, reg_cfp, throw_state, throwobj);
1922 int is_splat = flag & 0x01;
1925 const VALUE obj = ary;
1937 if (num + is_splat == 0) {
1940 else if (flag & 0x02) {
1945 for (i = 0; i < num -
len; i++) {
1950 for (j = 0; i < num; i++, j++) {
1963 *cfp->sp++ = rb_ary_new();
1972 for (; i < num -
len; i++) {
1976 for (rb_num_t j = 0; i < num; i++, j++) {
1977 *cfp->sp++ = ptr[
len - j - 1];
1981 for (rb_num_t j = 0; j < num; j++) {
1982 *cfp->sp++ = ptr[num - j - 1];
1998#if VM_CHECK_MODE > 0
1999 ccs->debug_sig = ~(
VALUE)ccs;
2005 ccs->entries = NULL;
2007 rb_id_table_insert(cc_tbl, mid, (
VALUE)ccs);
2015 if (! vm_cc_markable(cc)) {
2019 if (UNLIKELY(ccs->len == ccs->capa)) {
2020 if (ccs->capa == 0) {
2022 ccs->entries =
ALLOC_N(
struct rb_class_cc_entries_entry, ccs->capa);
2026 REALLOC_N(ccs->entries,
struct rb_class_cc_entries_entry, ccs->capa);
2029 VM_ASSERT(ccs->len < ccs->capa);
2031 const int pos = ccs->len++;
2032 ccs->entries[pos].argc = vm_ci_argc(ci);
2033 ccs->entries[pos].flag = vm_ci_flag(ci);
2036 if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
2042#if VM_CHECK_MODE > 0
2046 ruby_debug_printf(
"ccs:%p (%d,%d)\n", (
void *)ccs, ccs->len, ccs->capa);
2047 for (
int i=0; i<ccs->len; i++) {
2048 ruby_debug_printf(
"CCS CI ID:flag:%x argc:%u\n",
2049 ccs->entries[i].flag,
2050 ccs->entries[i].argc);
2051 rp(ccs->entries[i].cc);
2058 VM_ASSERT(vm_ccs_p(ccs));
2059 VM_ASSERT(ccs->len <= ccs->capa);
2061 for (
int i=0; i<ccs->len; i++) {
2064 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2065 VM_ASSERT(vm_cc_class_check(cc, klass));
2066 VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
2067 VM_ASSERT(!vm_cc_super_p(cc));
2068 VM_ASSERT(!vm_cc_refinement_p(cc));
2079 const ID mid = vm_ci_mid(ci);
2080 struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
2087 if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
2089 const int ccs_len = ccs->len;
2091 if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
2092 rb_vm_ccs_free(ccs);
2093 rb_id_table_delete(cc_tbl, mid);
2097 VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
2102 unsigned int argc = vm_ci_argc(ci);
2103 unsigned int flag = vm_ci_flag(ci);
2105 for (
int i=0; i<ccs_len; i++) {
2106 unsigned int ccs_ci_argc = ccs->entries[i].argc;
2107 unsigned int ccs_ci_flag = ccs->entries[i].flag;
2108 const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
2110 VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
2112 if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
2113 RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
2115 VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
2116 VM_ASSERT(ccs_cc->klass == klass);
2117 VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
2126 cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
2129 RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
2135 cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
2137 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2140 cme = rb_callable_method_entry(klass, mid);
2143 VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
2147 VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
2148 return &vm_empty_cc;
2151 VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
2156 VM_ASSERT(cc_tbl != NULL);
2158 if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
2164 ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
2168 cme = rb_check_overloaded_cme(cme, ci);
2170 const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
2171 vm_ccs_push(klass, ccs, ci, cc);
2173 VM_ASSERT(vm_cc_cme(cc) != NULL);
2174 VM_ASSERT(cme->called_id == mid);
2175 VM_ASSERT(vm_cc_cme(cc)->called_id == mid);
2189 cc = vm_search_cc(klass, ci);
2192 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
2193 VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
2194 VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
2195 VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
2196 VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
2206#if USE_DEBUG_COUNTER
2210 const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);
2212#if OPT_INLINE_METHOD_CACHE
2216 if (cd_owner && cc != empty_cc) {
2220#if USE_DEBUG_COUNTER
2221 if (!old_cc || old_cc == empty_cc) {
2223 RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
2225 else if (old_cc == cc) {
2226 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
2228 else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
2229 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
2231 else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
2232 vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
2233 RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
2236 RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
2241 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2242 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2253#if OPT_INLINE_METHOD_CACHE
2254 if (LIKELY(vm_cc_class_check(cc, klass))) {
2255 if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
2256 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
2257 RB_DEBUG_COUNTER_INC(mc_inline_hit);
2258 VM_ASSERT(vm_cc_cme(cc) == NULL ||
2259 (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
2260 vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
2264 RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
2267 RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
2271 return vm_search_method_slowpath0(cd_owner, cd, klass);
2278 VM_ASSERT(klass !=
Qfalse);
2281 return vm_search_method_fastpath(cd_owner, cd, klass);
2284#if __has_attribute(transparent_union)
2297 VALUE (*f10)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2298 VALUE (*f11)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2299 VALUE (*f12)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2300 VALUE (*f13)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2301 VALUE (*f14)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2302 VALUE (*f15)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE);
2305# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
2308# define make_cfunc_type(f) (cfunc_type)(f)
2318 VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
2319 VM_ASSERT(callable_method_entry_p(me));
2321 if (me->def->type != VM_METHOD_TYPE_CFUNC) {
2325#if __has_attribute(transparent_union)
2326 return me->def->body.cfunc.func == func.anyargs;
2328 return me->def->body.cfunc.func == func;
2337 return me && METHOD_ENTRY_BASIC(me);
2343 VM_ASSERT(iseq != NULL);
2345 return check_cfunc(vm_cc_cme(cc), func);
2348#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
2349#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
2351#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
/* Fragment of opt_equality_specialized (garbled extraction: the return
 * type, braces and several interior statements were dropped, and the
 * stray leading integers are original-file line numbers fused into the
 * text by the extractor — they are not code).
 *
 * Purpose (from visible code): fast-path `==` dispatch — compares by
 * identity when both operands are Fixnums/Flonums (and Integer/Float
 * `==` is unredefined), handles a Float NaN case, and delegates String
 * comparison to rb_str_eql_internal().
 * NOTE(review): presumably returns Qundef when no fast path applies so
 * the caller falls back to a full method call — the fallthrough return
 * is not visible here; confirm against the original source. */
2383opt_equality_specialized(
VALUE recv,
VALUE obj)
2385 if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
2386 goto compare_by_identity;
2388 else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
2389 goto compare_by_identity;
2392 goto compare_by_identity;
/* Float path: old MSVC needs an explicit isnan() check (NaN != NaN). */
2401#if MSC_VERSION_BEFORE(1300)
2405 else if (isnan(b)) {
2410 return RBOOL(a == b);
/* String path: argument order swapped is harmless for eql. */
2417 return rb_str_eql_internal(obj, recv);
2422 compare_by_identity:
2423 return RBOOL(recv == obj);
2429 VM_ASSERT(cd_owner != NULL);
2431 VALUE val = opt_equality_specialized(recv, obj);
2432 if (!UNDEF_P(val))
return val;
2434 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2438 return RBOOL(recv == obj);
2442#undef EQ_UNREDEFINED_P
/* Fragment of opt_equality_by_mid_slowpath (garbled extraction: braces
 * and the trailing else-branch were dropped; the stray leading integers
 * are original-file line numbers, not code).
 *
 * Purpose (from visible code): slow-path `==`/`eql?` lookup through the
 * global call-cache table (gccct_method_search); when the resolved
 * method is still the default rb_obj_equal, compare by identity.
 * NOTE(review): the missing branch presumably returns Qundef so the
 * caller performs a full method call — confirm against the original. */
2445NOINLINE(
static VALUE opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid));
2448opt_equality_by_mid_slowpath(
VALUE recv,
VALUE obj,
ID mid)
2450 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));
2452 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2453 return RBOOL(recv == obj);
2463 VALUE val = opt_equality_specialized(recv, obj);
2464 if (!UNDEF_P(val)) {
2468 return opt_equality_by_mid_slowpath(recv, obj, mid);
2475 return opt_equality_by_mid(obj1, obj2, idEq);
2481 return opt_equality_by_mid(obj1, obj2, idEqlP);
2491 case VM_CHECKMATCH_TYPE_WHEN:
2493 case VM_CHECKMATCH_TYPE_RESCUE:
2495 rb_raise(
rb_eTypeError,
"class or module required for rescue clause");
2498 case VM_CHECKMATCH_TYPE_CASE: {
2499 return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target,
RB_NO_KEYWORDS);
2502 rb_bug(
"check_match: unreachable");
2507#if MSC_VERSION_BEFORE(1300)
2508#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2510#define CHECK_CMP_NAN(a, b)
2514double_cmp_lt(
double a,
double b)
2516 CHECK_CMP_NAN(a, b);
2517 return RBOOL(a < b);
2521double_cmp_le(
double a,
double b)
2523 CHECK_CMP_NAN(a, b);
2524 return RBOOL(a <= b);
2528double_cmp_gt(
double a,
double b)
2530 CHECK_CMP_NAN(a, b);
2531 return RBOOL(a > b);
2535double_cmp_ge(
double a,
double b)
2537 CHECK_CMP_NAN(a, b);
2538 return RBOOL(a >= b);
2542static inline VALUE *
2547 if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
2548 VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
2550 if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
2551 int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
2552 int params = ISEQ_BODY(cfp->iseq)->param.size;
2555 bp += vm_ci_argc(ci);
2558 if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
2562#if VM_DEBUG_BP_CHECK
2563 if (bp != cfp->bp_check) {
2564 ruby_debug_printf(
"bp_check: %ld, bp: %ld\n",
2565 (
long)(cfp->bp_check - GET_EC()->vm_stack),
2566 (
long)(bp - GET_EC()->vm_stack));
2567 rb_bug(
"vm_base_ptr: unreachable");
2580 return vm_base_ptr(cfp);
2595static vm_call_handler vm_call_iseq_setup_func(
const struct rb_callinfo *ci,
const int param_size,
const int local_size);
2600 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
2602 return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
2608 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2611 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2612 int param = ISEQ_BODY(iseq)->param.size;
2613 int local = ISEQ_BODY(iseq)->local_table_size;
2614 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2620 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2621 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2622 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2623 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2624 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2625 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2626 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2627 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2631rb_iseq_only_optparam_p(
const rb_iseq_t *iseq)
2633 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2634 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2635 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2636 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2637 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2638 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2639 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2640 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2644rb_iseq_only_kwparam_p(
const rb_iseq_t *iseq)
2646 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2647 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2648 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2649 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2650 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2651 ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
2652 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2655#define ALLOW_HEAP_ARGV (-2)
2656#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
2661 vm_check_canary(GET_EC(), cfp->sp);
2667 int argc = calling->argc;
2669 if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
2673 VALUE *argv = cfp->sp - argc;
2674 VALUE argv_ary = rb_ary_hidden_new(
len + argc + 1);
2675 rb_ary_cat(argv_ary, argv, argc);
2676 rb_ary_cat(argv_ary, ptr,
len);
2677 cfp->sp -= argc - 1;
2678 cfp->sp[-1] = argv_ary;
2680 calling->heap_argv = argv_ary;
2686 if (max_args >= 0 &&
len + argc > max_args) {
2694 calling->argc +=
len - (max_args - argc + 1);
2695 len = max_args - argc + 1;
2704 calling->heap_argv = 0;
2706 CHECK_VM_STACK_OVERFLOW(cfp,
len);
2708 for (i = 0; i <
len; i++) {
2709 *cfp->sp++ = ptr[i];
2721 const VALUE *
const passed_keywords = vm_ci_kwarg(ci)->keywords;
2722 const int kw_len = vm_ci_kwarg(ci)->keyword_len;
2723 const VALUE h = rb_hash_new_with_size(kw_len);
2724 VALUE *sp = cfp->sp;
2727 for (i=0; i<kw_len; i++) {
2728 rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
2732 cfp->sp -= kw_len - 1;
2733 calling->argc -= kw_len - 1;
2734 calling->kw_splat = 1;
/* Fragment of vm_caller_setup_keyword_hash (garbled extraction: the
 * return type, braces and the outer condition line were dropped; the
 * stray leading integers are original-file line numbers, not code).
 *
 * Purpose (from visible code): normalize a caller-supplied keyword
 * splat — convert a non-nil non-Hash value via rb_to_hash_type() and
 * dup it, or dup a non-empty Hash when the call site does not mark the
 * kw-splat as mutable (IS_ARGS_KW_SPLAT_MUT), so the callee cannot
 * mutate the caller's hash.  Returns the (possibly duplicated) hash.
 * NOTE(review): the dropped outer condition presumably distinguishes
 * the Hash / non-Hash cases — confirm against the original source. */
2738vm_caller_setup_keyword_hash(
const struct rb_callinfo *ci,
VALUE keyword_hash)
2741 if (keyword_hash !=
Qnil) {
2743 keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
2746 else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !
RHASH_EMPTY_P(keyword_hash)) {
2752 keyword_hash = rb_hash_dup(keyword_hash);
2754 return keyword_hash;
2760 const struct rb_callinfo *restrict ci,
int max_args)
2762 if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
2763 if (IS_ARGS_KW_SPLAT(ci)) {
2765 VM_ASSERT(calling->kw_splat == 1);
2769 VALUE ary = cfp->sp[0];
2770 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
2773 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args))
return;
2777 if (UNLIKELY(calling->heap_argv)) {
2778 rb_ary_push(calling->heap_argv, kwh);
2779 ((
struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
2780 if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
2781 calling->kw_splat = 0;
2789 VM_ASSERT(calling->kw_splat == 1);
2793 calling->kw_splat = 0;
2798 VM_ASSERT(calling->kw_splat == 0);
2802 VALUE ary = cfp->sp[0];
2804 if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
2809 VALUE last_hash, argv_ary;
2810 if (UNLIKELY(argv_ary = calling->heap_argv)) {
2811 if (!IS_ARGS_KEYWORD(ci) &&
2814 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2816 rb_ary_pop(argv_ary);
2818 rb_ary_push(argv_ary, rb_hash_dup(last_hash));
2819 calling->kw_splat = 1;
2825 if (!IS_ARGS_KEYWORD(ci) &&
2826 calling->argc > 0 &&
2828 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
2835 cfp->sp[-1] = rb_hash_dup(last_hash);
2836 calling->kw_splat = 1;
2842 else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
2844 VM_ASSERT(calling->kw_splat == 1);
2845 VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
2850 calling->kw_splat = 0;
2856 else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
2858 VM_ASSERT(calling->kw_splat == 0);
2864 vm_caller_setup_arg_kw(cfp, calling, ci);
2868#define USE_OPT_HIST 0
2871#define OPT_HIST_MAX 64
2872static int opt_hist[OPT_HIST_MAX+1];
2876opt_hist_show_results_at_exit(
void)
2878 for (
int i=0; i<OPT_HIST_MAX; i++) {
2879 ruby_debug_printf(
"opt_hist\t%d\t%d\n", i, opt_hist[i]);
2889 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2890 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2891 const int opt = calling->argc - lead_num;
2892 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2893 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2894 const int param = ISEQ_BODY(iseq)->param.size;
2895 const int local = ISEQ_BODY(iseq)->local_table_size;
2896 const int delta = opt_num - opt;
2898 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2901 if (opt_pc < OPT_HIST_MAX) {
2905 opt_hist[OPT_HIST_MAX]++;
2909 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2917 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2918 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2919 const int opt = calling->argc - lead_num;
2920 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2922 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2925 if (opt_pc < OPT_HIST_MAX) {
2929 opt_hist[OPT_HIST_MAX]++;
2933 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2938 VALUE *
const passed_values,
const int passed_keyword_len,
const VALUE *
const passed_keywords,
2939 VALUE *
const locals);
2946 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2947 int param_size = ISEQ_BODY(iseq)->param.size;
2948 int local_size = ISEQ_BODY(iseq)->local_table_size;
2951 VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
2953 local_size = local_size + vm_ci_argc(calling->cd->ci);
2954 param_size = param_size + vm_ci_argc(calling->cd->ci);
2956 cfp->sp[0] = (
VALUE)calling->cd->ci;
2958 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
2968 VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
2969 RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);
2971 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2972 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
2974 const int ci_kw_len = kw_arg->keyword_len;
2975 const VALUE *
const ci_keywords = kw_arg->keywords;
2976 VALUE *argv = cfp->sp - calling->argc;
2977 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
2978 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2980 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
2981 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
2983 int param = ISEQ_BODY(iseq)->param.size;
2984 int local = ISEQ_BODY(iseq)->local_table_size;
2985 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2992 const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
2995 VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
2996 RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);
2998 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2999 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3000 VALUE *
const argv = cfp->sp - calling->argc;
3001 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3004 for (i=0; i<kw_param->num; i++) {
3005 klocals[i] = kw_param->default_values[i];
3012 int param = ISEQ_BODY(iseq)->param.size;
3013 int local = ISEQ_BODY(iseq)->local_table_size;
3014 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
3024 cfp->sp -= (calling->argc + 1);
3025 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
3026 return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
3035 st_table *dup_check_table = vm->unused_block_warning_table;
3045 .v = (
VALUE)cme->def,
3049 if (!strict_unused_block) {
3050 key = (st_data_t)cme->def->original_id;
3052 if (st_lookup(dup_check_table, key, NULL)) {
3062 key |= (st_data_t)(k1.b[i] ^ k2.b[
SIZEOF_VALUE-1-i]) << (8 * i);
3067 fprintf(stderr,
"pc:%p def:%p\n", pc, (
void *)cme->def);
3068 fprintf(stderr,
"key:%p\n", (
void *)key);
3072 if (st_insert(dup_check_table, key, 1)) {
3077 VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
3079 if (!
NIL_P(m_loc)) {
3080 rb_warn(
"the block passed to '%"PRIsVALUE
"' defined at %"PRIsVALUE
":%"PRIsVALUE
" may be ignored",
3084 rb_warn(
"the block may be ignored because '%"PRIsVALUE
"' does not use a block", name);
3091 const rb_iseq_t *iseq,
VALUE *argv,
int param_size,
int local_size)
3096 VM_ASSERT((vm_ci_argc(ci), 1));
3097 VM_ASSERT(vm_cc_cme(cc) != NULL);
3099 if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
3100 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
3101 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
3102 warn_unused_block(vm_cc_cme(cc), iseq, (
void *)ec->cfp->pc);
3105 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
3106 if (LIKELY(rb_simple_iseq_p(iseq))) {
3108 int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3109 CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
3111 if (calling->argc != lead_num) {
3112 argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);
3116 VM_ASSERT(cc == calling->cc);
3118 if (vm_call_iseq_optimizable_p(ci, cc)) {
3119 if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
3121 VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
3122 vm_cc_bf_set(cc, (
void *)iseq->body->iseq_encoded[1]);
3123 CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin,
true);
3126 CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size),
true);
3131 else if (rb_iseq_only_optparam_p(iseq)) {
3134 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3135 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
3137 CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
3138 const int argc = calling->argc;
3139 const int opt = argc - lead_num;
3141 if (opt < 0 || opt > opt_num) {
3142 argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
3145 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3146 CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
3147 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3148 vm_call_cacheable(ci, cc));
3151 CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
3152 !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
3153 vm_call_cacheable(ci, cc));
3157 VM_ASSERT((
int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
3158 for (
int i=argc; i<lead_num + opt_num; i++) {
3161 return (
int)ISEQ_BODY(iseq)->param.opt_table[opt];
3163 else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
3164 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
3165 const int argc = calling->argc;
3166 const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
3168 if (vm_ci_flag(ci) & VM_CALL_KWARG) {
3171 if (argc - kw_arg->keyword_len == lead_num) {
3172 const int ci_kw_len = kw_arg->keyword_len;
3173 const VALUE *
const ci_keywords = kw_arg->keywords;
3175 MEMCPY(ci_kws, argv + lead_num,
VALUE, ci_kw_len);
3177 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3178 args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
3180 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
3181 vm_call_cacheable(ci, cc));
3186 else if (argc == lead_num) {
3188 VALUE *
const klocals = argv + kw_param->bits_start - kw_param->num;
3189 args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
3191 if (klocals[kw_param->num] ==
INT2FIX(0)) {
3193 CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
3194 vm_call_cacheable(ci, cc));
3220 if (ISEQ_BODY(iseq)->param.flags.forwardable) {
3221 bool can_fastpath =
true;
3223 if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3225 if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
3226 ci = vm_ci_new_runtime(
3232 ci = forward_cd->caller_ci;
3234 can_fastpath =
false;
3238 if (!vm_ci_markable(ci)) {
3239 ci = vm_ci_new_runtime(
3244 can_fastpath =
false;
3246 argv[param_size - 1] = (
VALUE)ci;
3247 CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
3251 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
3278 const VALUE * lep = VM_CF_LEP(cfp);
3284 if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
3289 iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;
3293 int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
3295 const VALUE * from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
3296 VALUE * to = cfp->sp - 1;
3300 CHECK_VM_STACK_OVERFLOW0(cfp, to,
RARRAY_LEN(splat));
3305 CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
3307 cfp->sp = to + argc;
3326 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3329 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3330 int param_size = ISEQ_BODY(iseq)->param.size;
3331 int local_size = ISEQ_BODY(iseq)->local_table_size;
3333 RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
3335 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3336 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3342 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
3345 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
3346 int param_size = ISEQ_BODY(iseq)->param.size;
3347 int local_size = ISEQ_BODY(iseq)->local_table_size;
3349 RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
3352 local_size = local_size + vm_ci_argc(calling->cd->ci);
3353 param_size = param_size + vm_ci_argc(calling->cd->ci);
3355 const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
3356 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
3361 int opt_pc,
int param_size,
int local_size)
3366 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
3367 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
3370 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
3376 int opt_pc,
int param_size,
int local_size)
3378 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3379 VALUE *argv = cfp->sp - calling->argc;
3380 VALUE *sp = argv + param_size;
3381 cfp->sp = argv - 1 ;
3383 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
3384 calling->block_handler, (
VALUE)me,
3385 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3386 local_size - param_size,
3387 ISEQ_BODY(iseq)->stack_max);
3396 VALUE *argv = cfp->sp - calling->argc;
3398 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
3399 VALUE *src_argv = argv;
3400 VALUE *sp_orig, *sp;
3401 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
3403 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
3404 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
3405 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
3406 dst_captured->code.val = src_captured->code.val;
3407 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
3408 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
3411 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
3415 vm_pop_frame(ec, cfp, cfp->ep);
3418 sp_orig = sp = cfp->sp;
3421 sp[0] = calling->recv;
3425 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
3426 *sp++ = src_argv[i];
3429 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
3430 calling->recv, calling->block_handler, (
VALUE)me,
3431 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
3432 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
3433 ISEQ_BODY(iseq)->stack_max);
3441ractor_unsafe_check(
void)
3443 if (!rb_ractor_main_p()) {
3444 rb_raise(rb_eRactorUnsafeError,
"ractor unsafe method called from not main ractor");
3451 ractor_unsafe_check();
3459 ractor_unsafe_check();
3461 return (*f)(argc, argv, recv);
3467 ractor_unsafe_check();
3475 ractor_unsafe_check();
3477 return (*f)(recv, argv[0]);
3483 ractor_unsafe_check();
3485 return (*f)(recv, argv[0], argv[1]);
3491 ractor_unsafe_check();
3493 return (*f)(recv, argv[0], argv[1], argv[2]);
3499 ractor_unsafe_check();
3501 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3507 ractor_unsafe_check();
3508 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3509 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3515 ractor_unsafe_check();
3516 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3517 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3523 ractor_unsafe_check();
3524 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3525 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3531 ractor_unsafe_check();
3532 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3533 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3539 ractor_unsafe_check();
3540 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3541 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3547 ractor_unsafe_check();
3548 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3549 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3555 ractor_unsafe_check();
3556 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3557 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3563 ractor_unsafe_check();
3564 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3565 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3571 ractor_unsafe_check();
3572 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3573 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3579 ractor_unsafe_check();
3580 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3581 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3587 ractor_unsafe_check();
3588 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3589 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3603 return (*f)(argc, argv, recv);
3617 return (*f)(recv, argv[0]);
3624 return (*f)(recv, argv[0], argv[1]);
3631 return (*f)(recv, argv[0], argv[1], argv[2]);
3638 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3644 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3645 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3651 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3652 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3658 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3659 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3665 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3666 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3672 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3673 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3679 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3680 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3686 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3687 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3693 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3694 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3700 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3701 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3707 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3708 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3714 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3715 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3721 const int ov_flags = RAISED_STACKOVERFLOW;
3722 if (LIKELY(reg_cfp == ec->cfp + 1))
return TRUE;
3723 if (rb_ec_raised_p(ec, ov_flags)) {
3724 rb_ec_raised_reset(ec, ov_flags);
3730#define CHECK_CFP_CONSISTENCY(func) \
3731 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3732 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3738#if VM_DEBUG_VERIFY_METHOD_CACHE
3739 switch (me->def->type) {
3740 case VM_METHOD_TYPE_CFUNC:
3741 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3743# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3745 METHOD_BUG(ATTRSET);
3747 METHOD_BUG(BMETHOD);
3750 METHOD_BUG(OPTIMIZED);
3751 METHOD_BUG(MISSING);
3752 METHOD_BUG(REFINED);
3756 rb_bug(
"wrong method type: %d", me->def->type);
3759 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3766 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3773 VALUE recv = calling->recv;
3774 VALUE block_handler = calling->block_handler;
3775 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3777 if (UNLIKELY(calling->kw_splat)) {
3778 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3781 VM_ASSERT(reg_cfp == ec->cfp);
3783 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3786 vm_push_frame(ec, NULL, frame_type, recv,
3787 block_handler, (
VALUE)me,
3788 0, ec->cfp->sp, 0, 0);
3790 int len = cfunc->argc;
3793 reg_cfp->sp = stack_bottom;
3794 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3796 CHECK_CFP_CONSISTENCY(
"vm_call_cfunc");
3798 rb_vm_pop_frame(ec);
3800 VM_ASSERT(ec->cfp->sp == stack_bottom);
3802 EXEC_EVENT_HOOK(ec,
RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3803 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
3813 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3815 VALUE *sp = ec->cfp->sp;
3816 VALUE recv = *(sp - recv_idx - 1);
3817 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3818 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3819#if VM_CHECK_MODE > 0
3821 *(GET_EC()->cfp->sp) =
Qfalse;
3823 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (
VALUE)cme, 0, ec->cfp->sp, 0, 0);
3828rb_splat_or_kwargs_p(
const struct rb_callinfo *restrict ci)
3830 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3836 int argc = calling->argc;
3837 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3838 VALUE *argv = &stack_bottom[1];
3840 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3847 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3849 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3851 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3852 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3855 VALUE *stack_bottom = reg_cfp->sp - 2;
3857 VM_ASSERT(calling->argc == 1);
3861 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3864 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3866 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3873 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3876 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3877 return vm_call_cfunc_other(ec, reg_cfp, calling);
3881 calling->kw_splat = 0;
3883 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3884 VALUE *sp = stack_bottom;
3885 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3886 for(i = 0; i < argc; i++) {
3891 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3897 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3898 VALUE argv_ary = reg_cfp->sp[-1];
3902 int argc_offset = 0;
3904 if (UNLIKELY(argc > 0 &&
3906 (((
struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3908 return vm_call_cfunc_other(ec, reg_cfp, calling);
3912 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3918 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3919 VALUE keyword_hash = reg_cfp->sp[-1];
3922 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3925 return vm_call_cfunc_other(ec, reg_cfp, calling);
3932 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3934 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3935 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3937 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3938 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3940 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3942 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
3943 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
3947 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
3948 return vm_call_cfunc_other(ec, reg_cfp, calling);
3955 RB_DEBUG_COUNTER_INC(ccf_ivar);
3957 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE,
Qnil);
3964 RB_DEBUG_COUNTER_INC(ccf_attrset);
3965 VALUE val = *(cfp->sp - 1);
3967 attr_index_t index = vm_cc_attr_index(cc);
3968 shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
3969 ID id = vm_cc_cme(cc)->def->body.attr.id;
3970 rb_check_frozen(obj);
3971 VALUE res = vm_setivar(obj,
id, val, dest_shape_id, index);
3980 res = vm_setivar_default(obj,
id, val, dest_shape_id, index);
3981 if (!UNDEF_P(res)) {
3986 res = vm_setivar_slowpath_attr(obj,
id, val, cc);
3994 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
4004 VALUE procv = cme->def->body.bmethod.proc;
4007 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4008 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4012 GetProcPtr(procv, proc);
4013 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4023 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4027 VALUE procv = cme->def->body.bmethod.proc;
4030 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4031 rb_raise(
rb_eRuntimeError,
"defined with an un-shareable Proc in a different Ractor");
4035 GetProcPtr(procv, proc);
4036 const struct rb_block *block = &proc->block;
4038 while (vm_block_type(block) == block_type_proc) {
4039 block = vm_proc_block(block->as.proc);
4041 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4044 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4045 VALUE *
const argv = cfp->sp - calling->argc;
4046 const int arg_size = ISEQ_BODY(iseq)->param.size;
4049 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4050 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4053 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4058 vm_push_frame(ec, iseq,
4059 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4061 VM_GUARDED_PREV_EP(captured->ep),
4063 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4065 ISEQ_BODY(iseq)->local_table_size - arg_size,
4066 ISEQ_BODY(iseq)->stack_max);
4074 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4078 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4079 if (UNLIKELY(calling->heap_argv)) {
4084 argc = calling->argc;
4087 cfp->sp += - argc - 1;
4090 return vm_call_bmethod_body(ec, calling, argv);
4096 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4100 VALUE procv = cme->def->body.bmethod.proc;
4102 GetProcPtr(procv, proc);
4103 const struct rb_block *block = &proc->block;
4105 while (vm_block_type(block) == block_type_proc) {
4106 block = vm_proc_block(block->as.proc);
4108 if (vm_block_type(block) == block_type_iseq) {
4109 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4110 return vm_call_iseq_bmethod(ec, cfp, calling);
4113 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4114 return vm_call_noniseq_bmethod(ec, cfp, calling);
4118rb_find_defined_class_by_owner(
VALUE current_class,
VALUE target_owner)
4120 VALUE klass = current_class;
4128 while (
RTEST(klass)) {
4130 if (owner == target_owner) {
4136 return current_class;
4145 if (orig_me->defined_class == 0) {
4146 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4147 VM_ASSERT_TYPE(orig_me->owner,
T_MODULE);
4148 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4150 if (me->def->reference_count == 1) {
4151 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4155 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4163 VM_ASSERT(callable_method_entry_p(cme));
4170 return aliased_callable_method_entry(me);
4176 calling->cc = &VM_CC_ON_STACK(
Qundef,
4179 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4181 return vm_call_method_each_type(ec, cfp, calling);
4184static enum method_missing_reason
4187 enum method_missing_reason stat = MISSING_NOENTRY;
4188 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4189 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4190 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4200 ASSUME(calling->argc >= 0);
4202 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4203 int argc = calling->argc;
4204 VALUE recv = calling->recv;
4207 flags |= VM_CALL_OPT_SEND;
4209 if (UNLIKELY(! mid)) {
4210 mid = idMethodMissing;
4211 missing_reason = ci_missing_reason(ci);
4212 ec->method_missing_reason = missing_reason;
4215 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4216 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4217 rb_ary_unshift(argv_ary, symbol);
4220 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4221 VALUE exc = rb_make_no_method_exception(
4243 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4246 argc = ++calling->argc;
4248 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4251 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4252 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4253 VALUE exc = rb_make_no_method_exception(
4266 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4272 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4273 calling->cd = &new_fcd.cd;
4277 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4278 new_fcd.caller_ci = caller_ci;
4281 calling->cc = &VM_CC_ON_STACK(klass,
4283 { .method_missing_reason = missing_reason },
4284 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4286 if (flags & VM_CALL_FCALL) {
4287 return vm_call_method(ec, reg_cfp, calling);
4291 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4293 if (vm_cc_cme(cc) != NULL) {
4294 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4295 case METHOD_VISI_PUBLIC:
4296 return vm_call_method_each_type(ec, reg_cfp, calling);
4297 case METHOD_VISI_PRIVATE:
4298 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4300 case METHOD_VISI_PROTECTED:
4301 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4304 VM_UNREACHABLE(vm_call_method);
4306 return vm_call_method_missing(ec, reg_cfp, calling);
4309 return vm_call_method_nome(ec, reg_cfp, calling);
4319 i = calling->argc - 1;
4321 if (calling->argc == 0) {
4322 rb_raise(rb_eArgError,
"no method name given");
4346 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4352 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4354 int flags = VM_CALL_FCALL;
4358 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4359 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4360 sym = rb_ary_shift(argv_ary);
4361 flags |= VM_CALL_ARGS_SPLAT;
4362 if (calling->kw_splat) {
4363 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4364 ((
struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4365 calling->kw_splat = 0;
4367 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4370 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4371 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4377 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4378 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4384 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4387 int flags = vm_ci_flag(ci);
4389 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4390 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4391 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4392 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4393 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4394 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4397 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4398 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4403 const struct rb_callinfo *orig_ci,
enum method_missing_reason reason)
4405 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4407 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4408 unsigned int argc, flag;
4410 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4411 argc = ++calling->argc;
4414 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4415 vm_check_canary(ec, reg_cfp->sp);
4419 argv[0] =
ID2SYM(vm_ci_mid(orig_ci));
4422 ec->method_missing_reason = reason;
4426 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4432 if (!(flag & VM_CALL_FORWARDING)) {
4433 calling->cd = &new_fcd.cd;
4437 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4438 new_fcd.caller_ci = caller_ci;
4442 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }},
4443 rb_callable_method_entry_without_refinements(
CLASS_OF(calling->recv), idMethodMissing, NULL));
4444 return vm_call_method(ec, reg_cfp, calling);
4450 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4461 return vm_call_method_nome(ec, cfp, calling);
4463 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4464 cme->def->body.refined.orig_me) {
4465 cme = refined_method_callable_without_refinement(cme);
4468 calling->cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, cme);
4470 return vm_call_method_each_type(ec, cfp, calling);
4474find_refinement(
VALUE refinements,
VALUE klass)
4476 if (
NIL_P(refinements)) {
4479 return rb_hash_lookup(refinements, klass);
4488 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4489 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4492 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4493 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4497 }
while (cfp->iseq != local_iseq);
4508 if (orig_me->defined_class == 0) {
4516 VM_ASSERT(callable_method_entry_p(cme));
4518 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4528 ID mid = vm_ci_mid(calling->cd->ci);
4529 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4533 for (; cref; cref = CREF_NEXT(cref)) {
4534 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4535 if (
NIL_P(refinement))
continue;
4538 rb_callable_method_entry(refinement, mid);
4541 if (vm_cc_call(cc) == vm_call_super_method) {
4544 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4549 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4550 cme->def != ref_me->def) {
4553 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4562 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4563 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4578 if (calling->cd->cc) {
4579 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4581 return vm_call_method(ec, cfp, calling);
4584 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(
Qundef, vm_call_general, {{ 0 }}, ref_cme);
4585 calling->cc= ref_cc;
4586 return vm_call_method(ec, cfp, calling);
4590 return vm_call_method_nome(ec, cfp, calling);
4596NOINLINE(
static VALUE
4604 int argc = calling->argc;
4607 if (argc > 0)
MEMMOVE(&TOPN(argc), &TOPN(argc-1),
VALUE, argc);
4610 return vm_invoke_block(ec, reg_cfp, calling, ci,
false, block_handler);
4616 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4619 VALUE procval = calling->recv;
4620 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4626 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4628 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4631 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4632 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4635 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4636 calling->cc = rb_vm_search_method_slowpath(ci,
CLASS_OF(calling->recv));
4637 return vm_call_general(ec, reg_cfp, calling);
4644 VALUE recv = calling->recv;
4647 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4648 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4650 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4651 return internal_RSTRUCT_GET(recv,
off);
4657 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4659 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4667 VALUE recv = calling->recv;
4670 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4671 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4673 rb_check_frozen(recv);
4675 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4676 internal_RSTRUCT_SET(recv,
off, val);
4684 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4686 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4694#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4695 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4696 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4697 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4699 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4700 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4711 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4712 case OPTIMIZED_METHOD_TYPE_SEND:
4713 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4714 return vm_call_opt_send(ec, cfp, calling);
4715 case OPTIMIZED_METHOD_TYPE_CALL:
4716 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4717 return vm_call_opt_call(ec, cfp, calling);
4718 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4719 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4720 return vm_call_opt_block_call(ec, cfp, calling);
4721 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4722 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4726 VM_CALL_METHOD_ATTR(v,
4727 vm_call_opt_struct_aref(ec, cfp, calling),
4728 set_vm_cc_ivar(cc); \
4729 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4732 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4733 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4737 VM_CALL_METHOD_ATTR(v,
4738 vm_call_opt_struct_aset(ec, cfp, calling),
4739 set_vm_cc_ivar(cc); \
4740 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4744 rb_bug(
"vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4756 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4758 switch (cme->def->type) {
4759 case VM_METHOD_TYPE_ISEQ:
4760 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4761 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4762 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4765 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4766 return vm_call_iseq_setup(ec, cfp, calling);
4769 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4770 case VM_METHOD_TYPE_CFUNC:
4771 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4772 return vm_call_cfunc(ec, cfp, calling);
4774 case VM_METHOD_TYPE_ATTRSET:
4775 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4779 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4781 if (vm_cc_markable(cc)) {
4782 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4783 VM_CALL_METHOD_ATTR(v,
4784 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4785 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4791 VM_CALLCACHE_UNMARKABLE |
4792 VM_CALLCACHE_ON_STACK,
4798 .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
4803 VM_CALL_METHOD_ATTR(v,
4804 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4805 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4809 case VM_METHOD_TYPE_IVAR:
4810 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4812 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4813 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4814 VM_CALL_METHOD_ATTR(v,
4815 vm_call_ivar(ec, cfp, calling),
4816 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4819 case VM_METHOD_TYPE_MISSING:
4820 vm_cc_method_missing_reason_set(cc, 0);
4821 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4822 return vm_call_method_missing(ec, cfp, calling);
4824 case VM_METHOD_TYPE_BMETHOD:
4825 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4826 return vm_call_bmethod(ec, cfp, calling);
4828 case VM_METHOD_TYPE_ALIAS:
4829 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4830 return vm_call_alias(ec, cfp, calling);
4832 case VM_METHOD_TYPE_OPTIMIZED:
4833 return vm_call_optimized(ec, cfp, calling, ci, cc);
4835 case VM_METHOD_TYPE_UNDEF:
4838 case VM_METHOD_TYPE_ZSUPER:
4839 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4841 case VM_METHOD_TYPE_REFINED:
4844 return vm_call_refined(ec, cfp, calling);
4847 rb_bug(
"vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4857 const int stat = ci_missing_reason(ci);
4859 if (vm_ci_mid(ci) == idMethodMissing) {
4860 if (UNLIKELY(calling->heap_argv)) {
4865 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4866 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4870 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4882 VALUE defined_class = me->defined_class;
4883 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4884 return NIL_P(refined_class) ? defined_class : refined_class;
4893 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4895 if (vm_cc_cme(cc) != NULL) {
4896 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4897 case METHOD_VISI_PUBLIC:
4898 return vm_call_method_each_type(ec, cfp, calling);
4900 case METHOD_VISI_PRIVATE:
4901 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4902 enum method_missing_reason stat = MISSING_PRIVATE;
4903 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4905 vm_cc_method_missing_reason_set(cc, stat);
4906 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4907 return vm_call_method_missing(ec, cfp, calling);
4909 return vm_call_method_each_type(ec, cfp, calling);
4911 case METHOD_VISI_PROTECTED:
4912 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4913 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4915 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4916 return vm_call_method_missing(ec, cfp, calling);
4920 VM_ASSERT(vm_cc_cme(cc) != NULL);
4923 calling->cc = &cc_on_stack;
4924 return vm_call_method_each_type(ec, cfp, calling);
4927 return vm_call_method_each_type(ec, cfp, calling);
4930 rb_bug(
"unreachable");
4934 return vm_call_method_nome(ec, cfp, calling);
4941 RB_DEBUG_COUNTER_INC(ccf_general);
4942 return vm_call_method(ec, reg_cfp, calling);
4948 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
4949 VM_ASSERT(cc != vm_cc_empty());
4951 *(vm_call_handler *)&cc->call_ = vm_call_general;
4957 RB_DEBUG_COUNTER_INC(ccf_super_method);
4962 if (ec == NULL) rb_bug(
"unreachable");
4965 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
4966 return vm_call_method(ec, reg_cfp, calling);
4972vm_search_normal_superclass(
VALUE klass)
4977 klass =
RBASIC(klass)->klass;
4979 klass = RCLASS_ORIGIN(klass);
4983NORETURN(
static void vm_super_outside(
void));
4986vm_super_outside(
void)
4992empty_cc_for_super(
void)
4994 return &vm_empty_cc_for_super;
5000 VALUE current_defined_class;
5007 current_defined_class = vm_defined_class_for_protected_call(me);
5010 reg_cfp->iseq != method_entry_iseqptr(me) &&
5013 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5017 "self has wrong type to call super in this context: "
5018 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5023 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5025 "implicit argument passing of super from method defined"
5026 " by define_method() is not supported."
5027 " Specify all arguments explicitly.");
5030 ID mid = me->def->original_id;
5032 if (!vm_ci_markable(cd->ci)) {
5033 VM_FORCE_WRITE((
const VALUE *)&cd->ci->mid, (
VALUE)mid);
5037 cd->ci = vm_ci_new_runtime(mid,
5040 vm_ci_kwarg(cd->ci));
5047 VALUE klass = vm_search_normal_superclass(me->defined_class);
5051 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5055 cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd, klass);
5059 if (cached_cme == NULL) {
5061 cd->cc = empty_cc_for_super();
5063 else if (cached_cme->called_id != mid) {
5066 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5070 cd->cc = cc = empty_cc_for_super();
5074 switch (cached_cme->def->type) {
5076 case VM_METHOD_TYPE_REFINED:
5078 case VM_METHOD_TYPE_ATTRSET:
5079 case VM_METHOD_TYPE_IVAR:
5080 vm_cc_call_set(cc, vm_call_super_method);
5088 VM_ASSERT((vm_cc_cme(cc),
true));
5096block_proc_is_lambda(
const VALUE procval)
5101 GetProcPtr(procval, proc);
5102 return proc->is_lambda;
5112 VALUE self,
int argc,
const VALUE *argv,
int kw_splat,
VALUE block_handler,
5115 int is_lambda = FALSE;
5116 VALUE val, arg, blockarg;
5118 const struct vm_ifunc *ifunc = captured->code.ifunc;
5123 else if (argc == 0) {
5130 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5132 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5134 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5137 vm_push_frame(ec, (
const rb_iseq_t *)captured->code.ifunc,
5140 VM_GUARDED_PREV_EP(captured->ep),
5142 0, ec->cfp->sp, 0, 0);
5143 val = (*ifunc->func)(arg, (
VALUE)ifunc->data, argc, argv, blockarg);
5144 rb_vm_pop_frame(ec);
5152 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5158 return rb_sym_proc_call(
SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5167 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5169 for (i=0; i<
len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5177vm_callee_setup_block_arg_arg0_check(
VALUE *argv)
5179 VALUE ary, arg0 = argv[0];
5180 ary = rb_check_array_type(arg0);
5184 VM_ASSERT(argv[0] == arg0);
5192 if (rb_simple_iseq_p(iseq)) {
5196 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5198 if (arg_setup_type == arg_setup_block &&
5199 calling->argc == 1 &&
5200 ISEQ_BODY(iseq)->param.flags.has_lead &&
5201 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5202 !
NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5203 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5206 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5207 if (arg_setup_type == arg_setup_block) {
5208 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5210 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5211 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] =
Qnil;
5212 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5214 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5215 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5219 argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5226 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5235 calling = &calling_entry;
5236 calling->argc = argc;
5237 calling->block_handler = block_handler;
5238 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5240 calling->heap_argv = 0;
5241 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5243 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5251 bool is_lambda,
VALUE block_handler)
5254 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5255 const int arg_size = ISEQ_BODY(iseq)->param.size;
5256 VALUE *
const rsp = GET_SP() - calling->argc;
5257 VALUE *
const argv = rsp;
5258 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5262 vm_push_frame(ec, iseq,
5263 VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
5265 VM_GUARDED_PREV_EP(captured->ep), 0,
5266 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5268 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5276 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5278 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5279 int flags = vm_ci_flag(ci);
5281 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5282 ((calling->argc == 0) ||
5283 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5284 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5285 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5286 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5288 if (UNLIKELY(calling->heap_argv)) {
5289#if VM_ARGC_STACK_MAX < 0
5291 rb_raise(rb_eArgError,
"no receiver given");
5294 calling->recv = rb_ary_shift(calling->heap_argv);
5297 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5298 reg_cfp->sp[-2] = calling->recv;
5299 flags |= VM_CALL_ARGS_SPLAT;
5302 if (calling->argc < 1) {
5303 rb_raise(rb_eArgError,
"no receiver given");
5305 calling->recv = TOPN(--calling->argc);
5307 if (calling->kw_splat) {
5308 flags |= VM_CALL_KW_SPLAT;
5312 if (calling->argc < 1) {
5313 rb_raise(rb_eArgError,
"no receiver given");
5315 calling->recv = TOPN(--calling->argc);
5318 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5324 MAYBE_UNUSED(
bool is_lambda),
VALUE block_handler)
5329 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5330 argc = calling->argc;
5331 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ?
RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5337vm_proc_to_block_handler(
VALUE procval)
5339 const struct rb_block *block = vm_proc_block(procval);
5341 switch (vm_block_type(block)) {
5342 case block_type_iseq:
5343 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5344 case block_type_ifunc:
5345 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5346 case block_type_symbol:
5347 return VM_BH_FROM_SYMBOL(block->as.symbol);
5348 case block_type_proc:
5349 return VM_BH_FROM_PROC(block->as.proc);
5351 VM_UNREACHABLE(vm_yield_with_proc);
5358 bool is_lambda,
VALUE block_handler)
5360 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5361 VALUE proc = VM_BH_TO_PROC(block_handler);
5362 is_lambda = block_proc_is_lambda(proc);
5363 block_handler = vm_proc_to_block_handler(proc);
5366 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5372 bool is_lambda,
VALUE block_handler)
5376 bool is_lambda,
VALUE block_handler);
5378 switch (vm_block_handler_type(block_handler)) {
5379 case block_handler_type_iseq: func = vm_invoke_iseq_block;
break;
5380 case block_handler_type_ifunc: func = vm_invoke_ifunc_block;
break;
5381 case block_handler_type_proc: func = vm_invoke_proc_block;
break;
5382 case block_handler_type_symbol: func = vm_invoke_symbol_block;
break;
5383 default: rb_bug(
"vm_invoke_block: unreachable");
5386 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5390vm_make_proc_with_iseq(
const rb_iseq_t *blockiseq)
5397 rb_bug(
"vm_make_proc_with_iseq: unreachable");
5400 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5401 captured->code.iseq = blockiseq;
5403 return rb_vm_make_proc(ec, captured,
rb_cProc);
5407vm_once_exec(
VALUE iseq)
5414vm_once_clear(
VALUE data)
5417 is->once.running_thread = NULL;
5429 args[0] = obj; args[1] =
Qfalse;
5431 if (!UNDEF_P(r) &&
RTEST(r)) {
5443 enum defined_type
type = (
enum defined_type)op_type;
5450 return rb_gvar_defined(
SYM2ID(obj));
5452 case DEFINED_CVAR: {
5453 const rb_cref_t *cref = vm_get_cref(GET_EP());
5454 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5459 case DEFINED_CONST_FROM: {
5460 bool allow_nil =
type == DEFINED_CONST;
5462 return vm_get_ev_const(ec, klass,
SYM2ID(obj), allow_nil,
true);
5467 return rb_ec_obj_respond_to(ec, v,
SYM2ID(obj), TRUE);
5469 case DEFINED_METHOD:{
5474 switch (METHOD_ENTRY_VISI(me)) {
5475 case METHOD_VISI_PRIVATE:
5477 case METHOD_VISI_PROTECTED:
5481 case METHOD_VISI_PUBLIC:
5485 rb_bug(
"vm_defined: unreachable: %u", (
unsigned int)METHOD_ENTRY_VISI(me));
5489 return check_respond_to_missing(obj, v);
5494 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5498 case DEFINED_ZSUPER:
5503 VALUE klass = vm_search_normal_superclass(me->defined_class);
5504 if (!klass)
return false;
5506 ID id = me->def->original_id;
5513 return RTEST(vm_backref_defined(ec, GET_LEP(),
FIX2INT(obj)));
5515 rb_bug(
"unimplemented defined? type (VM)");
5525 return vm_defined(ec, reg_cfp, op_type, obj, v);
5529vm_get_ep(
const VALUE *
const reg_ep, rb_num_t lv)
5532 const VALUE *ep = reg_ep;
5533 for (i = 0; i < lv; i++) {
5534 ep = GET_PREV_EP(ep);
5540vm_get_special_object(
const VALUE *
const reg_ep,
5541 enum vm_special_object_type
type)
5544 case VM_SPECIAL_OBJECT_VMCORE:
5545 return rb_mRubyVMFrozenCore;
5546 case VM_SPECIAL_OBJECT_CBASE:
5547 return vm_get_cbase(reg_ep);
5548 case VM_SPECIAL_OBJECT_CONST_BASE:
5549 return vm_get_const_base(reg_ep);
5551 rb_bug(
"putspecialobject insn: unknown value_type %d",
type);
5558 const VALUE ary2 = ary2st;
5559 VALUE tmp1 = rb_check_to_array(ary1);
5560 VALUE tmp2 = rb_check_to_array(ary2);
5566 tmp1 = rb_ary_dup(ary1);
5570 return rb_ary_push(tmp1, ary2);
5572 return rb_ary_concat(tmp1, tmp2);
5580 const VALUE ary2 = ary2st;
5582 if (
NIL_P(ary2))
return ary1;
5584 VALUE tmp2 = rb_check_to_array(ary2);
5587 return rb_ary_push(ary1, ary2);
5589 return rb_ary_concat(ary1, tmp2);
5598 return vm_concat_array(ary1, ary2st);
5602rb_vm_concat_to_array(
VALUE ary1,
VALUE ary2st)
5604 return vm_concat_to_array(ary1, ary2st);
5611 return RTEST(flag) ? rb_ary_new() : rb_cArray_empty_frozen;
5613 VALUE tmp = rb_check_to_array(ary);
5617 else if (
RTEST(flag)) {
5618 return rb_ary_dup(tmp);
5630 return vm_splat_array(flag, ary);
5636 enum vm_check_match_type
type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5638 if (flag & VM_CHECKMATCH_ARRAY) {
5642 for (i = 0; i < n; i++) {
5644 VALUE c = check_match(ec, v, target,
type);
5653 return check_match(ec, pattern, target,
type);
5660 return vm_check_match(ec, target, pattern, flag);
5664vm_check_keyword(lindex_t bits, lindex_t idx,
const VALUE *ep)
5666 const VALUE kw_bits = *(ep - bits);
5669 unsigned int b = (
unsigned int)
FIX2ULONG(kw_bits);
5670 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5683 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5684 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5685 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5686 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5690 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5693 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5696 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5699 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
5706vm_const_get_under(
ID id, rb_num_t flags,
VALUE cbase)
5711 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5712 return rb_public_const_get_at(cbase,
id);
5720vm_check_if_class(
ID id, rb_num_t flags,
VALUE super,
VALUE klass)
5725 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5730 "superclass mismatch for class %"PRIsVALUE
"",
5743vm_check_if_module(
ID id,
VALUE mod)
5754vm_declare_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5757 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5761 rb_const_set_raw(cbase,
id, c);
5763 rb_const_added(cbase,
id);
5768vm_declare_module(
ID id,
VALUE cbase)
5776NORETURN(
static void unmatched_redefinition(
const char *
type,
VALUE cbase,
ID id,
VALUE old));
5780 VALUE name = rb_id2str(
id);
5781 VALUE message = rb_sprintf(
"%"PRIsVALUE
" is not a %s",
5783 VALUE location = rb_const_source_location_at(cbase,
id);
5784 if (!
NIL_P(location)) {
5785 rb_str_catf(message,
"\n%"PRIsVALUE
":%"PRIsVALUE
":"
5786 " previous definition of %"PRIsVALUE
" was here",
5787 rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
5793vm_define_class(
ID id, rb_num_t flags,
VALUE cbase,
VALUE super)
5797 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !
RB_TYPE_P(super,
T_CLASS)) {
5799 "superclass must be an instance of Class (given an instance of %"PRIsVALUE
")",
5803 vm_check_if_namespace(cbase);
5807 if ((klass = vm_const_get_under(
id, flags, cbase)) != 0) {
5808 if (!vm_check_if_class(
id, flags, super, klass))
5809 unmatched_redefinition(
"class", cbase,
id, klass);
5813 return vm_declare_class(
id, flags, cbase, super);
5818vm_define_module(
ID id, rb_num_t flags,
VALUE cbase)
5822 vm_check_if_namespace(cbase);
5823 if ((mod = vm_const_get_under(
id, flags, cbase)) != 0) {
5824 if (!vm_check_if_module(
id, mod))
5825 unmatched_redefinition(
"module", cbase,
id, mod);
5829 return vm_declare_module(
id, cbase);
5834vm_find_or_create_class_by_id(
ID id,
5839 rb_vm_defineclass_type_t
type = VM_DEFINECLASS_TYPE(flags);
5842 case VM_DEFINECLASS_TYPE_CLASS:
5844 return vm_define_class(
id, flags, cbase, super);
5846 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5850 case VM_DEFINECLASS_TYPE_MODULE:
5852 return vm_define_module(
id, flags, cbase);
5855 rb_bug(
"unknown defineclass type: %d", (
int)
type);
5859static rb_method_visibility_t
5864 if (!vm_env_cref_by_cref(cfp->ep)) {
5865 return METHOD_VISI_PUBLIC;
5868 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5877 if (!vm_env_cref_by_cref(cfp->ep)) {
5881 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5889 rb_method_visibility_t visi;
5894 visi = METHOD_VISI_PUBLIC;
5897 klass = CREF_CLASS_FOR_DEFINITION(cref);
5898 visi = vm_scope_visibility_get(ec);
5905 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, visi);
5909 RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (
const rb_iseq_t *)iseqval);
5912 if (!is_singleton && vm_scope_module_func_check(ec)) {
5914 rb_add_method_iseq(klass,
id, (
const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
5924 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
5926 if (block_handler == VM_BLOCK_HANDLER_NONE) {
5927 rb_vm_localjump_error(
"no block given (yield)",
Qnil, 0);
5930 return vm_invoke_block(ec, GET_CFP(), calling, ci,
false, block_handler);
5934enum method_explorer_type {
5936 mexp_search_invokeblock,
5945 VALUE block_handler,
5946 enum method_explorer_type method_explorer
5951 int argc = vm_ci_argc(ci);
5952 VALUE recv = TOPN(argc);
5954 .block_handler = block_handler,
5955 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
5961 switch (method_explorer) {
5962 case mexp_search_method:
5963 calling.cc = cc = vm_search_method_fastpath((
VALUE)reg_cfp->iseq, cd,
CLASS_OF(recv));
5964 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5966 case mexp_search_super:
5967 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
5968 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
5970 case mexp_search_invokeblock:
5971 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
5988 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
5989 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
false, &adjusted_cd, &adjusted_ci);
5991 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
5993 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
5998 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
false);
5999 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6010 VALUE bh = VM_BLOCK_HANDLER_NONE;
6011 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6026 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6027 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq,
true, &adjusted_cd, &adjusted_ci);
6029 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6031 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6036 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq,
true);
6037 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6048 VALUE bh = VM_BLOCK_HANDLER_NONE;
6049 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6077 if (check_method_basic_definition(vm_cc_cme(cc))) {
6086 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
6092 val = rb_mod_to_s(recv);
6098 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
6099 return rb_nil_to_s(recv);
6103 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
6104 return rb_true_to_s(recv);
6108 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
6109 return rb_false_to_s(recv);
6113 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
6114 return rb_fix_to_s(recv);
6122vm_opt_ary_freeze(
VALUE ary,
int bop,
ID id)
6124 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6133vm_opt_hash_freeze(
VALUE hash,
int bop,
ID id)
6135 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6144vm_opt_str_freeze(
VALUE str,
int bop,
ID id)
6146 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6160 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6161 return rb_ary_includes(ary, target);
6164 VALUE args[1] = {target};
6167 RUBY_DTRACE_CREATE_HOOK(ARRAY,
RARRAY_LEN(ary));
6168 VALUE dupary = rb_ary_resurrect(ary);
6170 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args,
RB_NO_KEYWORDS);
6177 return vm_opt_duparray_include_p(ec, ary, target);
6183 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6188 VALUE result = *ptr;
6189 rb_snum_t i = num - 1;
6191 const VALUE v = *++ptr;
6192 if (OPTIMIZED_CMP(v, result) > 0) {
6207 return vm_opt_newarray_max(ec, num, ptr);
6213 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6218 VALUE result = *ptr;
6219 rb_snum_t i = num - 1;
6221 const VALUE v = *++ptr;
6222 if (OPTIMIZED_CMP(v, result) < 0) {
6237 return vm_opt_newarray_min(ec, num, ptr);
6244 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6245 return rb_ary_hash_values(num, ptr);
6255 return vm_opt_newarray_hash(ec, num, ptr);
6264 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6266 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, num);
6267 return rb_ary_includes(
ary, target);
6270 VALUE args[1] = {target};
6278 return vm_opt_newarray_include_p(ec, num,
ptr, target);
6284 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6286 VALUE ary = rb_setup_fake_ary(&fake_ary,
ptr, num);
6287 return rb_ec_pack_ary(ec,
ary, fmt, (UNDEF_P(buffer) ?
Qnil : buffer));
6297 if (!UNDEF_P(buffer)) {
6298 args[1] = rb_hash_new_with_size(1);
6299 rb_hash_aset(args[1],
ID2SYM(idBuffer), buffer);
6304 return rb_vm_call_with_refinements(ec,
rb_ary_new4(num,
ptr), idPack, argc, args, kw_splat);
6311 return vm_opt_newarray_pack_buffer(ec, num,
ptr, fmt, buffer);
6317 return vm_opt_newarray_pack_buffer(ec, num,
ptr, fmt,
Qundef);
6323vm_track_constant_cache(
ID id,
void *ic)
6326 struct rb_id_table *const_cache = vm->constant_cache;
6327 VALUE lookup_result;
6330 if (rb_id_table_lookup(const_cache,
id, &lookup_result)) {
6334 ics = st_init_numtable();
6335 rb_id_table_insert(const_cache,
id, (
VALUE)ics);
6350 vm->inserting_constant_cache_id = id;
6352 st_insert(ics, (st_data_t) ic, (st_data_t)
Qtrue);
6354 vm->inserting_constant_cache_id = (
ID)0;
6362 for (
int i = 0; segments[i]; i++) {
6363 ID id = segments[i];
6364 if (
id == idNULL)
continue;
6365 vm_track_constant_cache(
id, ic);
6375 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6376 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6378 return (ic_cref == NULL ||
6379 ic_cref == vm_get_cref(reg_ep));
6387 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6388 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6393rb_vm_ic_hit_p(
IC ic,
const VALUE *reg_ep)
6395 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6401 if (ruby_vm_const_missing_count > 0) {
6402 ruby_vm_const_missing_count = 0;
6409 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6414 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6415 rb_yjit_constant_ic_update(iseq, ic, pos);
6424 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6427 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6430 ruby_vm_constant_cache_misses++;
6431 val = vm_get_ev_const_chain(ec, segments);
6432 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6435 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6447 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6448 return is->once.value;
6450 else if (is->once.running_thread == NULL) {
6452 is->once.running_thread = th;
6456 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6459 else if (is->once.running_thread == th) {
6461 return vm_once_exec((
VALUE)iseq);
6465 RUBY_VM_CHECK_INTS(ec);
6472vm_case_dispatch(CDHASH hash, OFFSET else_offset,
VALUE key)
6474 switch (OBJ_BUILTIN_TYPE(key)) {
6480 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6481 SYMBOL_REDEFINED_OP_FLAG |
6482 INTEGER_REDEFINED_OP_FLAG |
6483 FLOAT_REDEFINED_OP_FLAG |
6484 NIL_REDEFINED_OP_FLAG |
6485 TRUE_REDEFINED_OP_FLAG |
6486 FALSE_REDEFINED_OP_FLAG |
6487 STRING_REDEFINED_OP_FLAG)) {
6491 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6495 if (rb_hash_stlike_lookup(hash, key, &val)) {
6515 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6516 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6517 static const char stack_consistency_error[] =
6518 "Stack consistency error (sp: %"PRIdPTRDIFF
", bp: %"PRIdPTRDIFF
")";
6519#if defined RUBY_DEVEL
6520 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6525 rb_bug(stack_consistency_error, nsp, nbp);
6532 if (FIXNUM_2_P(recv, obj) &&
6533 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6534 return rb_fix_plus_fix(recv, obj);
6536 else if (FLONUM_2_P(recv, obj) &&
6537 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6545 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6550 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6551 return rb_str_opt_plus(recv, obj);
6555 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6556 return rb_ary_plus(recv, obj);
6566 if (FIXNUM_2_P(recv, obj) &&
6567 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6568 return rb_fix_minus_fix(recv, obj);
6570 else if (FLONUM_2_P(recv, obj) &&
6571 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6579 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6590 if (FIXNUM_2_P(recv, obj) &&
6591 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6592 return rb_fix_mul_fix(recv, obj);
6594 else if (FLONUM_2_P(recv, obj) &&
6595 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6603 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6614 if (FIXNUM_2_P(recv, obj) &&
6615 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6616 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_div_fix(recv, obj);
6618 else if (FLONUM_2_P(recv, obj) &&
6619 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6620 return rb_flo_div_flo(recv, obj);
6627 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6628 return rb_flo_div_flo(recv, obj);
6638 if (FIXNUM_2_P(recv, obj) &&
6639 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6640 return (
FIX2LONG(obj) == 0) ?
Qundef : rb_fix_mod_fix(recv, obj);
6642 else if (FLONUM_2_P(recv, obj) &&
6643 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6651 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6662 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6663 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6665 if (!UNDEF_P(val)) {
6666 return RBOOL(!
RTEST(val));
6676 if (FIXNUM_2_P(recv, obj) &&
6677 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6680 else if (FLONUM_2_P(recv, obj) &&
6681 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6689 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6701 if (FIXNUM_2_P(recv, obj) &&
6702 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6705 else if (FLONUM_2_P(recv, obj) &&
6706 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6714 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6726 if (FIXNUM_2_P(recv, obj) &&
6727 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6730 else if (FLONUM_2_P(recv, obj) &&
6731 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6739 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6751 if (FIXNUM_2_P(recv, obj) &&
6752 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6755 else if (FLONUM_2_P(recv, obj) &&
6756 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6764 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6781 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6790 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6791 return rb_ary_push(recv, obj);
6808 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6819 if (FIXNUM_2_P(recv, obj) &&
6820 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6832 if (FIXNUM_2_P(recv, obj) &&
6833 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6834 return rb_fix_aref(recv, obj);
6839 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6841 return rb_ary_entry_internal(recv,
FIX2LONG(obj));
6844 return rb_ary_aref1(recv, obj);
6848 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6849 return rb_hash_aref(recv, obj);
6863 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6865 rb_ary_store(recv,
FIX2LONG(obj), set);
6869 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6870 rb_hash_aset(recv, obj, set);
6882 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6883 rb_hash_compare_by_id_p(recv) ==
Qfalse &&
6884 !
FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6885 return rb_hash_aref(recv, key);
6895 return vm_opt_aref_with(recv, key);
6902 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6903 rb_hash_compare_by_id_p(recv) ==
Qfalse) {
6904 return rb_hash_aset(recv, key, val);
6912vm_opt_length(
VALUE recv,
int bop)
6918 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6919 if (bop == BOP_EMPTY_P) {
6920 return LONG2NUM(RSTRING_LEN(recv));
6927 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6931 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6940vm_opt_empty_p(
VALUE recv)
6942 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
6955 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
6958 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
6974 case RSHIFT(~0UL, 1):
6977 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
6995vm_opt_succ(
VALUE recv)
6998 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
6999 return fix_succ(recv);
7005 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7016 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7017 return RBOOL(!
RTEST(recv));
7032 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7036 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7054 VALUE self = GET_SELF();
7056 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7058 if (event & global_hooks->events) {
7061 vm_dtrace(event, ec);
7062 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7068 if (local_hooks != NULL) {
7069 if (event & local_hooks->events) {
7072 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
7078#define VM_TRACE_HOOK(target_event, val) do { \
7079 if ((pc_events & (target_event)) & enabled_flags) { \
7080 vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
7087 VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
7088 VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
7089 return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
7095 const VALUE *pc = reg_cfp->pc;
7096 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
7099 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
7105 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
7108 rb_hook_list_t *
const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
7109 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
7113 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
7114 enabled_flags |= iseq_local_events;
7116 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
7118 if (bmethod_frame) {
7120 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
7121 bmethod_local_hooks = me->def->body.bmethod.hooks;
7122 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
7123 if (bmethod_local_hooks) {
7124 bmethod_local_events = bmethod_local_hooks->events;
7129 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
7133 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
7141 else if (ec->trace_arg != NULL) {
7149 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
7152 ruby_debug_printf(
"vm_trace>>%4d (%4x) - %s:%d %s\n",
7155 RSTRING_PTR(rb_iseq_path(iseq)),
7156 (
int)rb_iseq_line_no(iseq, pos),
7157 RSTRING_PTR(rb_iseq_label(iseq)));
7159 VM_ASSERT(reg_cfp->pc == pc);
7160 VM_ASSERT(pc_events != 0);
7170 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE,
Qundef);
7171 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH,
Qundef);
7189#if VM_CHECK_MODE > 0
7190NORETURN( NOINLINE( COLDFUNC
7191void rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)));
7194Init_vm_stack_canary(
void)
7197 int n = ruby_fill_random_bytes(&vm_stack_canary,
sizeof vm_stack_canary,
false);
7198 vm_stack_canary |= 0x01;
7200 vm_stack_canary_was_born =
true;
7205rb_vm_canary_is_found_dead(
enum ruby_vminsn_type i,
VALUE c)
7209 const char *insn = rb_insns_name(i);
7213 rb_bug(
"dead canary found at %s: %s", insn, str);
7217void Init_vm_stack_canary(
void) { }
7249 return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
7256 return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
7263 return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
7270 return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
7277 return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
7284 return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
7291 return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
7298 return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
7305 return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
7311 typedef VALUE (*rb_invoke_funcptr9_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9);
7312 return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
7318 typedef VALUE (*rb_invoke_funcptr10_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10);
7319 return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
7325 typedef VALUE (*rb_invoke_funcptr11_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11);
7326 return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
7332 typedef VALUE (*rb_invoke_funcptr12_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12);
7333 return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
7339 typedef VALUE (*rb_invoke_funcptr13_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13);
7340 return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
7346 typedef VALUE (*rb_invoke_funcptr14_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14);
7347 return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
7353 typedef VALUE (*rb_invoke_funcptr15_t)(
rb_execution_context_t *ec,
VALUE self,
VALUE v1,
VALUE v2,
VALUE v3,
VALUE v4,
VALUE v5,
VALUE v6,
VALUE v7,
VALUE v8,
VALUE v9,
VALUE v10,
VALUE v11,
VALUE v12,
VALUE v13,
VALUE v14,
VALUE v15);
7354 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
7359static builtin_invoker
7360lookup_builtin_invoker(
int argc)
7362 static const builtin_invoker invokers[] = {
7381 return invokers[argc];
7387 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
7388 SETUP_CANARY(canary_p);
7389 rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
7390 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
7391 CHECK_CANARY(canary_p, BIN(invokebuiltin));
7398 return invoke_bf(ec, cfp, bf, argv);
7405 fputs(
"vm_invoke_builtin_delegate: passing -> ", stderr);
7406 for (
int i=0; i<bf->argc; i++) {
7407 ruby_debug_printf(
":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
7409 ruby_debug_printf(
"\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
7410 (
void *)(uintptr_t)bf->func_ptr);
7413 if (bf->argc == 0) {
7414 return invoke_bf(ec, cfp, bf, NULL);
7417 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
7418 return invoke_bf(ec, cfp, bf, argv);
7428 return cfp->ep[index];
#define RUBY_ASSERT(...)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
#define RUBY_EVENT_END
Encountered an end of a class clause.
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
#define RUBY_EVENT_CLASS
Encountered a new class.
#define RUBY_EVENT_LINE
Encountered a new line.
#define RUBY_EVENT_RETURN
Encountered a return statement.
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
#define RUBY_EVENT_B_CALL
Encountered an yield statement.
uint32_t rb_event_flag_t
Represents event(s).
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
#define RUBY_EVENT_RESCUE
Encountered a rescue statement.
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
VALUE rb_module_new(void)
Creates a new, anonymous module.
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class::inherited.
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
#define TYPE(_)
Old name of rb_type.
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
#define REALLOC_N
Old name of RB_REALLOC_N.
#define ALLOC
Old name of RB_ALLOC.
#define RFLOAT_VALUE
Old name of rb_float_value.
#define T_STRING
Old name of RUBY_T_STRING.
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
#define T_NIL
Old name of RUBY_T_NIL.
#define T_FLOAT
Old name of RUBY_T_FLOAT.
#define T_IMEMO
Old name of RUBY_T_IMEMO.
#define ID2SYM
Old name of RB_ID2SYM.
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
#define SYM2ID
Old name of RB_SYM2ID.
#define CLASS_OF
Old name of rb_class_of.
#define rb_ary_new4
Old name of rb_ary_new_from_values.
#define FIXABLE
Old name of RB_FIXABLE.
#define LONG2FIX
Old name of RB_INT2FIX.
#define FIX2INT
Old name of RB_FIX2INT.
#define T_MODULE
Old name of RUBY_T_MODULE.
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
#define FIX2ULONG
Old name of RB_FIX2ULONG.
#define T_TRUE
Old name of RUBY_T_TRUE.
#define T_ICLASS
Old name of RUBY_T_ICLASS.
#define T_HASH
Old name of RUBY_T_HASH.
#define ALLOC_N
Old name of RB_ALLOC_N.
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
#define rb_ary_new3
Old name of rb_ary_new_from_args.
#define LONG2NUM
Old name of RB_LONG2NUM.
#define rb_exc_new3
Old name of rb_exc_new_str.
#define T_FALSE
Old name of RUBY_T_FALSE.
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
#define T_ARRAY
Old name of RUBY_T_ARRAY.
#define T_OBJECT
Old name of RUBY_T_OBJECT.
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
#define DBL2NUM
Old name of rb_float_new.
#define T_CLASS
Old name of RUBY_T_CLASS.
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
#define FL_TEST
Old name of RB_FL_TEST.
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
#define SYMBOL_P
Old name of RB_SYMBOL_P.
void rb_notimplement(void)
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
#define ruby_verbose
This variable controls whether the interpreter is in debug mode.
VALUE rb_eTypeError
TypeError exception.
VALUE rb_eFatal
fatal exception.
VALUE rb_eNoMethodError
NoMethodError exception.
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
VALUE rb_eRuntimeError
RuntimeError exception.
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports unless $VERBOSE is nil.
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
@ RB_WARN_CATEGORY_STRICT_UNUSED_BLOCK
Warning is for checking unused block strictly.
VALUE rb_cClass
Class class.
VALUE rb_cArray
Array class.
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
VALUE rb_cRegexp
Regexp class.
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
VALUE rb_cHash
Hash class.
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
VALUE rb_cBasicObject
BasicObject class.
VALUE rb_cModule
Module class.
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
VALUE rb_cFloat
Float class.
VALUE rb_cProc
Proc class.
VALUE rb_cString
String class.
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
VALUE rb_reg_nth_defined(int n, VALUE md)
Identical to rb_reg_nth_match(), except it just returns Boolean.
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
void rb_thread_schedule(void)
Tries to switch to another thread.
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
VALUE rb_sym2str(VALUE symbol)
Obtain a frozen string representation of a symbol (not including the leading colon).
int off
Offset inside of ptr.
int len
Length of the buffer.
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
#define ALLOCA_N(type, n)
Convenient macro that allocates an array of n elements of the given type on the stack via alloca.
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
static int RARRAY_LENINT(VALUE ary)
Identical to rb_array_len(), except it differs for the return type.
static VALUE * RARRAY_PTR(VALUE ary)
Wild use of a C pointer.
#define RARRAY_AREF(a, i)
Queries the element at the given index of the given array.
#define RARRAY_CONST_PTR
Just another name of rb_array_const_ptr.
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
#define RBASIC(obj)
Convenient casting macro.
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
#define RHASH_SIZE(h)
Queries the size of the hash.
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks the contents for viability as a C string.
#define RB_PASS_KEYWORDS
Pass keywords, final argument should be a hash of keywords.
#define RB_NO_KEYWORDS
Do not pass keywords.
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
const VALUE ary[1]
Embedded elements.
const VALUE * ptr
Pointer to the C array that holds the elements of the array.
const ID * segments
A null-terminated list of ids, used to represent a constant's path. idNULL is used to represent the :: prefix.
rb_cref_t * cref
class reference, should be marked
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
IFUNC (Internal FUNCtion)
const VALUE cref_or_me
class reference or rb_method_entry_t
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
#define SIZEOF_VALUE
Identical to sizeof(VALUE), except it is a macro that can also be used inside of preprocessor directives such as #if.
uintptr_t VALUE
Type that represents a Ruby object.
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.