#include "ruby/internal/config.h"

#ifdef HAVE_STDATOMIC_H
  #include <stdatomic.h>
#endif

#include "debug_counter.h"
#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/set_table.h"
#include "internal/struct.h"

#include "insns_info.inc"
                                         int argc, const VALUE *argv, int priv);
static VALUE
ruby_vm_special_exception_copy(VALUE exc)
{
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    return e;
}
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;

    VALUE at = rb_ec_backtrace_object(ec);
    mesg = ruby_vm_special_exception_copy(mesg);

    EC_JUMP_TAG(ec, TAG_RAISE);
NORETURN(static void vm_stackoverflow(void));

    ec_stack_overflow(GET_EC(), TRUE);

    rb_bug("system stack overflow during GC. Faulty native extension?");

    if (crit >= rb_stack_overflow_fatal) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
    ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
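
/*
 * Illustrative sketch (not part of the VM, all names hypothetical): the
 * overflow checks above follow the usual watermark pattern - compare the
 * current C stack position against a precomputed limit and escalate only
 * when the limit is crossed.
 *
 *     typedef struct { char *stack_limit; } toy_ec;
 *
 *     static int
 *     toy_stack_overflow_p(const toy_ec *ec)
 *     {
 *         char probe;                      // address of a fresh local
 *         return &probe < ec->stack_limit; // assumes a downward-growing stack
 *     }
 *
 * A real check must leave enough headroom below the limit for the handler
 * (backtrace capture, exception copy) to run without overflowing again.
 */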
static bool
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;

    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment),
              "imemo_type:%s", rb_imemo_name(imemo_type((VALUE)cme)));

    if (callable_class_p(cme->defined_class)) {
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    cref_or_me_type = imemo_type(cref_or_me);

    if (type & VM_FRAME_FLAG_BMETHOD) {

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (cref_or_me_type != imemo_ment) {
        rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
    }
    if (req_cref && cref_or_me_type != imemo_cref) {
        rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
    }
    if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
        if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {

        rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
    }

    if (cref_or_me_type == imemo_ment) {
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL || RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
      case magic: \
        vm_check_frame_detail(type, req_block, req_me, req_cref, \
                              specval, cref_or_me, is_cframe, iseq); \
        break

    switch (given_magic) {
        /*                           BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;

    unsigned int pos = 0;
    while (pos < ISEQ_BODY(iseq)->iseq_size) {
        int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {

    rb_bug("failed to find the previous insn");
    if (! LIKELY(vm_stack_canary_was_born)) {

    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {

    else if (! (iseq = GET_ISEQ())) {

    else if (LIKELY(sp[0] != vm_stack_canary)) {

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");

#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
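
/*
 * Illustrative sketch (not part of the VM, names hypothetical): a stack
 * canary plants a randomized sentinel one slot above the live stack top;
 * if a later probe finds the slot changed, some instruction handler wrote
 * past the area it owned.
 *
 *     static unsigned long toy_canary;    // initialized once from a RNG
 *
 *     static void toy_plant(unsigned long *sp)  { *sp = toy_canary; }
 *     static int  toy_killed(const unsigned long *sp)
 *     {
 *         return *sp != toy_canary;       // clobbered => report and abort
 *     }
 */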
static void
vm_push_frame_debug_counter_inc(
        const struct rb_execution_context_struct *ec,
        const struct rb_control_frame_struct *reg_cfp,
        VALUE type)
{
    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

        RB_DEBUG_COUNTER_INC(frame_R2R);
        RB_DEBUG_COUNTER_INC(frame_R2C);
        RB_DEBUG_COUNTER_INC(frame_C2R);
        RB_DEBUG_COUNTER_INC(frame_C2C);
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
VALUE
rb_vm_stack_canary(void)
{
    return vm_stack_canary;
}

STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
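
/*
 * Illustrative note (hedged): the STATIC_ASSERTs above pin the env data
 * slots at fixed offsets from the EP, which is what lets the rest of this
 * file index them directly:
 *
 *     // ep[-2]: method entry or cref   (VM_ENV_DATA_INDEX_ME_CREF)
 *     // ep[-1]: specval (block handler / previous EP)
 *     // ep[ 0]: frame flags            (VM_ENV_DATA_INDEX_FLAGS)
 *     VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
 */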
    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    for (int i=0; i < local_size; i++) {

#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
#endif

    vm_push_frame_debug_counter_inc(ec, cfp, type);
    if (VMDEBUG == 2) SDR();

    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);

    rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);

                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
                  VM_BLOCK_HANDLER_NONE,

    return (VALUE)dmy_iseq;
static VALUE
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);

        rb_str_catf(err_mess, "..%d", max);
rb_error_arity(int argc, int min, int max)
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}

static inline void
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}

void
rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}
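
/*
 * Illustrative sketch (not part of the VM, names hypothetical): the
 * fast/slow split above is the classic generational write-barrier shape -
 * plain store when no barrier is needed, otherwise tell the GC first.
 *
 *     static void
 *     toy_env_write(unsigned long *ep, int idx, unsigned long v, int wb_required)
 *     {
 *         if (!wb_required) {
 *             ep[idx] = v;            // fast path: unbarriered store
 *         }
 *         else {
 *             toy_gc_remember(ep);    // hypothetical: record the container
 *             ep[idx] = v;            // then perform the store
 *         }
 *     }
 */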
    if (block_handler == VM_BLOCK_HANDLER_NONE) {

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
      case block_handler_type_ifunc:
        return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
      case block_handler_type_symbol:
        return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
      case block_handler_type_proc:
        return VM_BH_TO_PROC(block_handler);
    }
    VM_UNREACHABLE(rb_vm_bh_to_procval);
vm_svar_valid_p(VALUE svar)
{
    switch (imemo_type(svar)) {

    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }

    const struct vm_svar *svar = lep_svar(ec, lep);

      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;

        const VALUE ary = svar->others;

    struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);

    struct vm_svar *svar = lep_svar(ec, lep);

        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));

      case VM_SVAR_LASTLINE:
      case VM_SVAR_BACKREF:

        VALUE ary = svar->others;

    val = lep_svar_get(ec, lep, key);

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

    rb_bug("unexpected back-ref");

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

    return rb_reg_last_defined(backref);

    rb_bug("unexpected back-ref");

        nth = (int)(type >> 1);
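
/*
 * Illustrative note (hedged): for "$1"-style reads, the operand packs the
 * group number into the upper bits with a tag in bit 0, which is why the
 * code above recovers it with `nth = (int)(type >> 1)`.  A toy encoding:
 *
 *     #define TOY_PACK_NTH(n)    ((((unsigned long)(n)) << 1) | 1)
 *     #define TOY_UNPACK_NTH(t)  ((int)((t) >> 1))
 */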
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

    rb_bug("check_method_entry: svar should not be there:");

env_method_entry_unchecked(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    switch (imemo_type(obj)) {

    const VALUE *ep = cfp->ep;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);

    const VALUE *ep = cfp->ep;

    while (!VM_ENV_LOCAL_P_UNCHECKED(ep)) {
        if ((me = env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP_UNCHECKED(ep);
    }

    return env_method_entry_unchecked(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
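
/*
 * Illustrative sketch (not part of the VM, names hypothetical): both
 * walkers above share one shape - scan outward through non-local
 * environments via the previous-EP link, then consult the local (method
 * level) environment as the final stop.
 *
 *     static const void *
 *     toy_env_search(const unsigned long *ep,
 *                    int (*local_p)(const unsigned long *),
 *                    const unsigned long *(*prev)(const unsigned long *),
 *                    const void *(*check)(unsigned long, int))
 *     {
 *         const void *hit;
 *         while (!local_p(ep)) {
 *             if ((hit = check(ep[-2], 0)) != NULL) return hit;
 *             ep = prev(ep);
 *         }
 *         return check(ep[-2], 1);   // the local env may also hold an svar
 *     }
 */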
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
#if VM_CHECK_MODE == 0

static rb_cref_t *
check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

    rb_bug("check_method_entry: svar should not be there:");

static rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
static bool
is_cref(const VALUE v, int can_be_svar)
{
    switch (imemo_type(v)) {

static bool
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}

static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;

    switch (imemo_type(v)) {

        new_cref = vm_cref_dup(cref);

        VM_FORCE_WRITE(vptr, (VALUE)new_cref);

        return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);

    rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");

static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {

            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}

static rb_cref_t *
vm_get_cref(const VALUE *ep)
{

    rb_bug("vm_get_cref: unreachable");
const rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}

    return vm_get_cref(cfp->ep);

static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
        if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            RCLASS_CLONED_P(CREF_CLASS(cref))) {

        cref = CREF_NEXT(cref);
    #define ADD_NEW_CREF(new_cref) \
        if (new_cref_tail) { \
            RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
        } \
        else { \
            new_cref_head = new_cref; \
        } \
        new_cref_tail = new_cref;

        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            ADD_NEW_CREF(new_cref);
            return new_cref_head;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        ADD_NEW_CREF(new_cref);

    return new_cref_head;

    prev_cref = vm_env_cref(ep);

    prev_cref = vm_env_cref(cfp->ep);

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
static VALUE
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}

static VALUE
vm_get_const_base(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        }
        cref = CREF_NEXT(cref);
static void
vm_check_if_namespace(VALUE klass)
{
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);

static void
vm_ensure_not_refinement_module(VALUE self)
{
        rb_warn("not defined at the refinement, but at the outer class/module");

    if (NIL_P(orig_klass) && allow_nil) {
        /* in the current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {

            klass = CREF_CLASS(cref);

            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {

                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);

                        if (am == klass) break;

                        if (is_defined) return 1;

                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;

                        goto search_continue;

                    if (UNLIKELY(!rb_ractor_main_p())) {
                        rb_raise(rb_eRactorIsolationError,
                                 "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                                 rb_class_path(klass), rb_id2name(id));

        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));

    vm_check_if_namespace(orig_klass);

        return rb_public_const_defined_from(orig_klass, id);

        return rb_public_const_get_from(orig_klass, id);

    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {

    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
    }

        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc,
                                          int is_attr, attr_index_t index, shape_id_t shape_id));

static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc,
                int is_attr, attr_index_t index, shape_id_t shape_id)
{
        vm_cc_attr_index_set(cc, index, shape_id);

        vm_ic_attr_index_set(iseq, ic, index, shape_id);

#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
        return default_value;

    if (UNLIKELY(!rb_ractor_main_p())) {

    if (default_value == Qundef) { /* defined? */

    fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);

    fields_obj = rb_obj_fields(obj, id);

        return default_value;

    shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);
    VALUE *ivar_list = rb_imemo_fields_ptr(fields_obj);

    shape_id_t cached_id;

        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);

        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);

    if (LIKELY(cached_id == shape_id)) {

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
#endif

#if USE_DEBUG_COUNTER
        if (cached_id != INVALID_SHAPE_ID) {
            RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
        }
        else {
            RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
        }
        if (cached_id != INVALID_SHAPE_ID) {
            RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
        }
        else {
            RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
        }
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
#endif

        if (UNLIKELY(rb_shape_too_complex_p(shape_id))) {

            RUBY_ASSERT(table == rb_imemo_fields_complex_tbl(fields_obj));

            if (!st_lookup(table, id, &val)) {
                val = default_value;
            }

            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {

                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
                }

                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;
                }

                val = ivar_list[index];

                    vm_cc_attr_index_initialize(cc, shape_id);

                    vm_ic_attr_index_initialize(ic, shape_id);

                val = default_value;

    if (!UNDEF_P(default_value)) {

    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    return rb_attr_get(obj, id);
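
/*
 * Illustrative sketch (not part of the VM, names hypothetical): the inline
 * cache above reduces an ivar read to "same shape id as last time => reuse
 * the cached slot index".
 *
 *     struct toy_ic { unsigned int shape_id; unsigned int index; };
 *
 *     static int
 *     toy_ivar_read(const struct toy_ic *ic, unsigned int shape_id,
 *                   const unsigned long *fields, unsigned long *out)
 *     {
 *         if (ic->shape_id == shape_id) {  // hit: one compare, one load
 *             *out = fields[ic->index];
 *             return 1;
 *         }
 *         return 0;                        // miss: slow lookup refills the cache
 *     }
 */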
static void
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq,
               IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));

        vm_cc_attr_index_set(cc, index, next_shape_id);

        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

        rb_check_frozen(obj);

        attr_index_t index = rb_ivar_set_index(obj, id, val);
        shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);

        if (!rb_shape_too_complex_p(next_shape_id)) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
        }

    RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);

    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
NOINLINE(static VALUE vm_setivar_class(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));

static VALUE
vm_setivar_class(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
    if (UNLIKELY(!rb_ractor_main_p())) {

    VALUE fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);
    if (UNLIKELY(!fields_obj)) {

    shape_id_t shape_id = RBASIC_SHAPE_ID(fields_obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) &&
            RSHAPE_EDGE_NAME(dest_shape_id) == id &&
            RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
            RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));

    RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);

    if (shape_id != dest_shape_id) {
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
        RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));

static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) &&
            RSHAPE_EDGE_NAME(dest_shape_id) == id &&
            RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
            RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));

    VALUE fields_obj = rb_obj_fields(obj, id);

    RB_OBJ_WRITE(fields_obj, &rb_imemo_fields_ptr(fields_obj)[index], val);

    if (shape_id != dest_shape_id) {
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
        RBASIC_SET_SHAPE_ID(fields_obj, dest_shape_id);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
        shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));

        if (LIKELY(shape_id == dest_shape_id)) {
            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
        }
        else if (dest_shape_id != INVALID_SHAPE_ID) {
            if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) &&
                RSHAPE_EDGE_NAME(dest_shape_id) == id &&
                RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                RBASIC_SET_SHAPE_ID(obj, dest_shape_id);

                RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));

        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
        RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
    VALUE defined_class = 0;

        defined_class = RBASIC(defined_class)->klass;

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);

        rb_bug("the cvc table should be set");

    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();

    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() &&
        ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);

    return vm_getclassvariable(iseq, cfp, id, ic);

    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() &&
        ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    update_classvariable_cache(iseq, klass, id, cref, ic);

    vm_setclassvariable(iseq, cfp, id, val, ic);

    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);

    shape_id_t dest_shape_id;
    attr_index_t index;
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {

        if (!UNDEF_P(vm_setivar_class(obj, id, val, dest_shape_id, index))) {

        if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {

        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    vm_setinstancevariable(iseq, obj, id, val, ic);
        ec->tag->state = RUBY_TAG_FATAL;

        ec->tag->state = TAG_THROW;

    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
    }
    else {
        ec->tag->state = TAG_RAISE;
    }

               const int flag, const VALUE throwobj)
    else if (state == TAG_BREAK) {

        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {

            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;

                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;

        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {

            ep = VM_ENV_PREV_EP(ep);
        }

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {

                    if (in_class_frame) {

                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {

                                if (tep == target_ep) {

                                    goto unexpected_return;

                            tep = VM_ENV_PREV_EP(tep);
                        }

            else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                  case ISEQ_TYPE_MAIN:
                    if (in_class_frame) goto unexpected_return;
                    if (target_ep == NULL) {

                    goto unexpected_return;

                  case ISEQ_TYPE_EVAL: {

                    enum rb_iseq_type t = ISEQ_BODY(is)->type;
                    while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                        if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                        t = ISEQ_BODY(is)->type;
                    }
                    toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;

                  case ISEQ_TYPE_CLASS:

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {

                goto unexpected_return;

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }

      unexpected_return:
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);

        return vm_throw_continue(ec, throwobj);

    return vm_throw(ec, reg_cfp, throw_state, throwobj);
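
/*
 * Illustrative note (hedged): `throw_state` multiplexes the jump kind and
 * a "no escape" flag in one operand; the two masks above peel them apart.
 * A toy encoding with made-up widths:
 *
 *     #define TOY_STATE_MASK   0xff          // hypothetical low bits: TAG_*
 *     #define TOY_NO_ESCAPE    0x8000        // hypothetical flag bit
 *     #define TOY_STATE(ts)    ((int)((ts) & TOY_STATE_MASK))
 *     #define TOY_NOESC_P(ts)  (((ts) & TOY_NO_ESCAPE) != 0)
 */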
    int is_splat = flag & 0x01;

    const VALUE obj = ary;

    if (num + is_splat == 0) {

    else if (flag & 0x02) {

        for (i = 0; i < num - len; i++) {

        for (j = 0; i < num; i++, j++) {

        for (; i < num - len; i++) {

        for (rb_num_t j = 0; i < num; i++, j++) {
            *cfp->sp++ = ptr[len - j - 1];
        }

        for (rb_num_t j = 0; j < num; j++) {
            *cfp->sp++ = ptr[num - j - 1];
        }
    int initial_capa = 2;

#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;
#endif

    ccs->capa = initial_capa;

    rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);

    if (! vm_cc_markable(cc)) {

    if (UNLIKELY(ccs->len == ccs->capa)) {

        ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
#if VM_CHECK_MODE > 0
        ccs->debug_sig = ~(VALUE)ccs;
#endif

        rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {

#if VM_CHECK_MODE > 0

    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);
    }

    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {

        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    }
    ASSERT_vm_locking();

    if (rb_multi_ractor_p()) {
        if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {

    rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj);

    if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {

        VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
        rb_vm_cc_table_delete(new_table, mid);
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);

        rb_vm_cc_table_delete(cc_tbl, mid);

    ASSERT_vm_locking();

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    const VALUE original_cc_table = cc_tbl;

        cc_tbl = rb_vm_cc_table_create(1);

    else if (rb_multi_ractor_p()) {
        cc_tbl = rb_vm_cc_table_dup(cc_tbl);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

        cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    if (UNLIKELY(rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj))) {

        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);

    cme = rb_check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(cc_tbl, mid, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    if (original_cc_table != cc_tbl) {
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
    }

    cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));

    if (rb_managed_id_table_lookup(cc_tbl, mid, &ccs_obj)) {

        const int ccs_len = ccs->len;

        if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {

            vm_evict_cc(klass, cc_tbl, mid);

            VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

            unsigned int argc = vm_ci_argc(ci);
            unsigned int flag = vm_ci_flag(ci);

            for (int i=0; i<ccs_len; i++) {
                unsigned int ccs_ci_argc = ccs->entries[i].argc;
                unsigned int ccs_ci_flag = ccs->entries[i].flag;
                const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
                    RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                    VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                    VM_ASSERT(ccs_cc->klass == klass);
                    VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
    const ID mid = vm_ci_mid(ci);

    const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);

    if (rb_multi_ractor_p()) {

        cc = vm_lookup_cc(klass, ci, mid);

        cc = vm_populate_cc(klass, ci, mid);

    cc = vm_search_cc(klass, ci);

    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
    VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
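
/*
 * Illustrative sketch (not part of the VM, names hypothetical): the lookup
 * above keys a per-class table by method id, then scans a short entry list
 * keyed by the call-info (argc, flag) pair.
 *
 *     struct toy_entry { unsigned int argc, flag; const void *cc; };
 *
 *     static const void *
 *     toy_ccs_probe(const struct toy_entry *entries, int len,
 *                   unsigned int argc, unsigned int flag)
 *     {
 *         for (int i = 0; i < len; i++) {
 *             if (entries[i].argc == argc && entries[i].flag == flag)
 *                 return entries[i].cc;   // same call shape: reuse the cache
 *         }
 *         return NULL;                    // miss: caller creates and pushes one
 *     }
 */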
#if USE_DEBUG_COUNTER

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    if (cd_owner && cc != empty_cc) {

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);

        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);

    return vm_search_method_slowpath0(cd_owner, cd, klass);

    VM_ASSERT(klass != Qfalse);

    const struct rb_callcache *cc = vm_search_method_fastpath(cd_owner, cd, klass);
    return vm_cc_cme(cc);

    return vm_search_method(cd_owner, cd, recv);
#if __has_attribute(transparent_union)

    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);

# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
#else
# define make_cfunc_type(f) (cfunc_type)(f)
#endif
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {

#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
#else
        return me->def->body.cfunc.func == func;
#endif

    return me && METHOD_ENTRY_BASIC(me);

    VM_ASSERT(iseq != NULL);

    return check_cfunc(cme, func);

    return check_cfunc(me, func);

    return vm_method_cfunc_is(iseq, cd, recv, func);

#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
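
/*
 * Illustrative note (hedged): with the transparent_union attribute, any of
 * the fN pointer types can be passed where `cfunc_type` is expected with no
 * explicit cast; the fallback branch instead casts through the plain
 * typedef.  Toy shape:
 *
 *     typedef union {
 *         long (*f0)(void);
 *         long (*f1)(long);
 *     } toy_fn __attribute__((__transparent_union__));
 *
 *     static int toy_takes_fn(toy_fn fn);  // callable as toy_takes_fn(some_f1)
 */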
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)

static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }

        goto compare_by_identity;

#if MSC_VERSION_BEFORE(1300)

        else if (isnan(b)) {

        return RBOOL(a == b);

        return rb_str_eql_internal(obj, recv);

  compare_by_identity:
    return RBOOL(recv == obj);

    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {

    return RBOOL(recv == obj);

#undef EQ_UNREDEFINED_P
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));

static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {

    return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);
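
/*
 * Illustrative sketch (not part of the VM, names hypothetical):
 * EQ_UNREDEFINED_P above consults a "basic operation" bit that is cleared
 * once e.g. Integer#== is redefined, so every fast path costs one AND.
 *
 *     static unsigned int toy_redefined_flags;        // bit per (op, class)
 *
 *     #define TOY_BOP_EQ_INTEGER      0x1u
 *     #define TOY_EQ_UNREDEFINED_P()  ((toy_redefined_flags & TOY_BOP_EQ_INTEGER) == 0)
 *
 *     // on redefinition: toy_redefined_flags |= TOY_BOP_EQ_INTEGER;
 */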
      case VM_CHECKMATCH_TYPE_WHEN:

      case VM_CHECKMATCH_TYPE_RESCUE:

            rb_raise(rb_eTypeError, "class or module required for rescue clause");

      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }

    rb_bug("check_match: unreachable");

#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b)
#endif

static inline VALUE
double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}

static inline VALUE
double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}

static inline VALUE
double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}

static inline VALUE
double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}
static inline VALUE *

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;

            bp += vm_ci_argc(ci);
        }

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {

#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif

    return vm_base_ptr(cfp);
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci,
                                               const int param_size, const int local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;

static bool
rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

static bool
rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)

    vm_check_canary(GET_EC(), cfp->sp);

    int argc = calling->argc;

    if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {

        VALUE *argv = cfp->sp - argc;

        cfp->sp -= argc - 1;
        cfp->sp[-1] = argv_ary;

        calling->heap_argv = argv_ary;

    if (max_args >= 0 && len + argc > max_args) {

        calling->argc += len - (max_args - argc + 1);
        len = max_args - argc + 1;

    calling->heap_argv = 0;

    CHECK_VM_STACK_OVERFLOW(cfp, len);

    for (i = 0; i < len; i++) {
        *cfp->sp++ = ptr[i];
    }
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;

vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
{
    if (keyword_hash != Qnil) {

        keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));

    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {

        keyword_hash = rb_hash_dup(keyword_hash);
    }
    return keyword_hash;
                     const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {

            VM_ASSERT(calling->kw_splat == 1);

            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            if (UNLIKELY(calling->heap_argv)) {

                ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                    calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 1);

            calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 0);

            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {

            VALUE last_hash, argv_ary;
            if (UNLIKELY(argv_ary = calling->heap_argv)) {
                if (!IS_ARGS_KEYWORD(ci) &&

                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    calling->kw_splat = 1;

                if (!IS_ARGS_KEYWORD(ci) &&
                    calling->argc > 0 &&

                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    cfp->sp[-1] = rb_hash_dup(last_hash);
                    calling->kw_splat = 1;

    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {

        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

            calling->kw_splat = 0;

    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {

        VM_ASSERT(calling->kw_splat == 0);

        vm_caller_setup_arg_kw(cfp, calling, ci);
#define USE_OPT_HIST 0

#if USE_OPT_HIST
#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

static void
opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
}
#endif

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
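
/*
 * Illustrative note (hedged): `opt_table` maps the number of optional
 * arguments actually supplied to the bytecode offset where execution must
 * begin, so the defaults of the still-missing optionals get evaluated:
 *
 *     // given `def m(a, b = 1, c = 2)` and the call `m(x, y)`:
 *     //   opt    = argc - lead_num          // == 1
 *     //   opt_pc = param.opt_table[opt]     // skips only b's default code
 */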
                                     VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                                     VALUE *const locals);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    cfp->sp[0] = (VALUE)calling->cd->ci;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;

    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    cfp->sp -= (calling->argc + 1);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    return builtin_invoker0(ec, calling->recv, NULL, func_ptr);

    set_table *dup_check_table = vm->unused_block_warning_table;

        .v = (VALUE)cme->def,

    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (set_table_lookup(dup_check_table, key)) {

            key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);

        fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
        fprintf(stderr, "key:%p\n", (void *)key);

    if (set_insert(dup_check_table, key)) {

        VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);

        if (!NIL_P(m_loc)) {
            rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",
        }
        else {
            rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
        }
                     const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);
    }

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {

            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
            }

            VM_ASSERT(cc == calling->cc);

            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&

                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
            }
        }
        else if (rb_iseq_only_optparam_p(iseq)) {

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }

            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {

            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;

                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));
                }
            }
            else if (argc == lead_num) {

                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));
                }
            }
        }
    }

    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {

            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(

                ci = forward_cd->caller_ci;

            can_fastpath = false;
        }

        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(

            can_fastpath = false;
        }
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);
    }

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
    const VALUE *lep = VM_CF_LEP(cfp);

    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {

        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;

    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;

    const VALUE *from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
    VALUE *to = cfp->sp - 1;

        CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));

        CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);

    cfp->sp = to + argc;
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

                      int opt_pc, int param_size, int local_size)
{
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
    }
    else {
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
    }

                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1; /* recv */

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    VALUE *argv = cfp->sp - calling->argc;

    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;

    sp[0] = calling->recv;

    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);
static inline void
ractor_unsafe_check(void)
{
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    }
}

    ractor_unsafe_check();

    ractor_unsafe_check();

    return (*f)(argc, argv, recv);

    ractor_unsafe_check();

    ractor_unsafe_check();

    return (*f)(recv, argv[0]);

    ractor_unsafe_check();

    return (*f)(recv, argv[0], argv[1]);

    ractor_unsafe_check();

    return (*f)(recv, argv[0], argv[1], argv[2]);

    ractor_unsafe_check();

    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3734 return (*f)(argc, argv, recv);
3748 return (*f)(recv, argv[0]);
3755 return (*f)(recv, argv[0], argv[1]);
3762 return (*f)(recv, argv[0], argv[1], argv[2]);
3769 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3775 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3776 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3782 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3783 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3789 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3790 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3796 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3797 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3803 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3804 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3810 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3811 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3817 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3818 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3824 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3825 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3831 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3832 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3838 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3839 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3845 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3846 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
static inline int
vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
{
    const int ov_flags = RAISED_STACKOVERFLOW;
    if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
    if (rb_ec_raised_p(ec, ov_flags)) {
        rb_ec_raised_reset(ec, ov_flags);
        return TRUE;
    }
    return FALSE;
}

#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
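/* The consistency check above guards against a C function that unwinds or
 * rewinds the VM stack without the interpreter noticing: after a cfunc
 * returns, the caller's frame must sit exactly one slot above ec->cfp. */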
static inline const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
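/* vm_call_cfunc_with_frame_ is the common tail of every cfunc call path:
 * it pushes a CFUNC frame, fires C_CALL events, dispatches through the
 * arity-specific invoker selected above, then pops the frame and fires
 * C_RETURN events. */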
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    VM_ASSERT(reg_cfp == ec->cfp);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    int len = cfunc->argc;
    if (len >= 0) rb_check_arity(argc, len, len);

    reg_cfp->sp = stack_bottom;
    val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    VM_ASSERT(ec->cfp->sp == stack_bottom);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
void
rb_vm_push_cfunc_frame(const rb_callable_method_entry_t *cme, int recv_idx)
{
    VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);

    rb_execution_context_t *ec = GET_EC();
    VALUE *sp = ec->cfp->sp;
    VALUE recv = *(sp - recv_idx - 1);
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    VALUE block_handler = VM_BLOCK_HANDLER_NONE;
#if VM_CHECK_MODE > 0
    /* clear the stack canary for the frame we are about to push */
    *(GET_EC()->cfp->sp) = Qfalse;
#endif
    vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
}
bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}

static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    int argc = calling->argc;
    VALUE *stack_bottom = reg_cfp->sp - argc - 1;
    VALUE *argv = &stack_bottom[1];

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
}
static VALUE
vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    VALUE argv_ary;
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        VM_ASSERT(!IS_ARGS_KEYWORD(ci));
        int argc = RARRAY_LENINT(argv_ary);
        VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
        VALUE *stack_bottom = reg_cfp->sp - 2;

        VM_ASSERT(calling->argc == 1);

        return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    }
    else {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));

        return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    }
}
static inline VALUE
vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                         int stack_offset, int argc_offset)
{
    VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
    int argc = RARRAY_LENINT(argv_ary) - argc_offset;

    if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }

    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    calling->kw_splat = 0;
    int i;
    VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
    VALUE *sp = stack_bottom;
    CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
    for (i = 0; i < argc; i++) {
        *++sp = argv[i];
    }

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom + 1, stack_bottom);
}
static inline VALUE
vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
    VALUE argv_ary = reg_cfp->sp[-1];
    int argc = RARRAY_LENINT(argv_ary);
    VALUE last_hash;
    int argc_offset = 0;

    if (UNLIKELY(argc > 0 &&
                 RB_TYPE_P((last_hash = rb_ary_last(0, NULL, argv_ary)), T_HASH) &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
        if (!RHASH_EMPTY_P(last_hash)) {
            return vm_call_cfunc_other(ec, reg_cfp, calling);
        }
        argc_offset++;
    }
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
}
static VALUE
vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
    VALUE keyword_hash = reg_cfp->sp[-1];

    if (RB_TYPE_P(keyword_hash, T_HASH) && RHASH_EMPTY_P(keyword_hash)) {
        return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
    }

    return vm_call_cfunc_other(ec, reg_cfp, calling);
}
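/* vm_call_cfunc chooses a specialized fast path for the two common splat
 * shapes, f(*args) and f(*args, **kwargs), so the argument array can be
 * passed to the C function without being expanded onto the VM stack.
 * Anything else falls through to the general vm_call_cfunc_other path. */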
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
            /* f(*args) */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
            return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
        }
        if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
            /* f(*args, **kwargs) */
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
            return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
        }
    }

    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
    return vm_call_cfunc_other(ec, reg_cfp, calling);
}
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    cfp->sp -= 1;
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
    return ivar;
}
static VALUE
vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2;
    attr_index_t index;
    shape_id_t dest_shape_id;
    vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
    ID id = vm_cc_cme(cc)->def->body.attr.id;
    rb_check_frozen(obj);
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    if (UNDEF_P(res)) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
            break;
          case T_CLASS:
          case T_MODULE:
            res = vm_setivar_class(obj, id, val, dest_shape_id, index);
            if (!UNDEF_P(res)) {
                return res;
            }
            break;
          default:
            res = vm_setivar_default(obj, id, val, dest_shape_id, index);
            if (!UNDEF_P(res)) {
                return res;
            }
        }
        res = vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    return res;
}

static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
}
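/* bmethods are methods defined from a Proc (define_method).  The body
 * below validates that a non-shareable Proc is only invoked from the
 * Ractor it was defined in before yielding to vm_invoke_bmethod. */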
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}
static VALUE
vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    VM_ASSERT(vm_block_type(block) == block_type_iseq);

    const struct rb_captured_block *captured = &block->as.captured;
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE * const argv = cfp->sp - calling->argc;
    const int arg_size = ISEQ_BODY(iseq)->param.size;

    int opt_pc;
    if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
        opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
    }
    else {
        opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
    }

    cfp->sp = argv - 1; /* -1 for the receiver */

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
                  calling->recv,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)cme,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  argv + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size,
                  ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
static VALUE
vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

    VALUE *argv;
    int argc;
    CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(calling->heap_argv)) {
        argv = RARRAY_PTR(calling->heap_argv);
        cfp->sp -= 2;
    }
    else {
        argc = calling->argc;
        argv = ALLOCA_N(VALUE, argc);
        MEMCPY(argv, cfp->sp - argc, VALUE, argc);
        cfp->sp += - argc - 1;
    }

    return vm_call_bmethod_body(ec, calling, argv);
}

static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;
    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    if (vm_block_type(block) == block_type_iseq) {
        CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
        return vm_call_iseq_bmethod(ec, cfp, calling);
    }

    CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
    return vm_call_noniseq_bmethod(ec, cfp, calling);
}
VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    /* for prepended Module, then start from the covering class */
    if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
        RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
        klass = RBASIC_CLASS(klass);
    }

    while (RTEST(klass)) {
        VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
        if (owner == target_owner) {
            return klass;
        }
        klass = RCLASS_SUPER(klass);
    }

    return current_class; /* maybe module function */
}

static const rb_callable_method_entry_t *
aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->reference_count == 1) {
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
        }
        else {
            rb_method_definition_t *def =
                rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
            rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
        }
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));
    return cme;
}

const rb_callable_method_entry_t *
rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    return aliased_callable_method_entry(me);
}

static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
static enum method_missing_reason
ci_missing_reason(const struct rb_callinfo *ci)
{
    enum method_missing_reason stat = MISSING_NOENTRY;
    if (vm_ci_flag(ci) & VM_CALL_VCALL && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) stat |= MISSING_VCALL;
    if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
    if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
    return stat;
}
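/* vm_call_symbol implements send/__send__-style dispatch: the method name
 * arrives as a value on the stack.  If the name is not an interned symbol
 * (so the method cannot possibly exist), it is rewritten into a
 * method_missing call without interning a new symbol as a side effect. */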
    ASSUME(calling->argc >= 0);

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol);
    flags |= VM_CALL_OPT_SEND;

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        VALUE argv_ary;
        if (UNLIKELY(argv_ary = calling->heap_argv)) {
            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                rb_ary_unshift(argv_ary, symbol);

                /* avoid inadvertent symbol creation */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv,
                                                        RARRAY_LENINT(argv_ary), RARRAY_CONST_PTR(argv_ary),
                                                        priv);
                rb_exc_raise(exc);
            }
            else {
                rb_ary_unshift(argv_ary, rb_str_intern(symbol));
            }
        }
        else {
            /* shift the arguments up to make room for the method name */
            int i = argc;
            CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
            INC_SP(1);
            MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
            argc = ++calling->argc;

            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                /* avoid inadvertent symbol creation */
                TOPN(i) = symbol;
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv,
                                                        argc, argv, priv);
                rb_exc_raise(exc);
            }
            else {
                TOPN(i) = rb_str_intern(symbol);
            }
        }
    }

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }

    sym = TOPN(i);
    /* shift the remaining arguments down over the method-name slot */
    MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
    calling->argc -= 1;
    DEC_SP(1);

    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
static VALUE
vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = VM_CALL_FCALL;
    VALUE sym;

    VALUE argv_ary;
    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        sym = rb_ary_shift(argv_ary);
        flags |= VM_CALL_ARGS_SPLAT;
        if (calling->kw_splat) {
            VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
            calling->kw_splat = 0;
        }
        return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    }

    if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
    return vm_call_opt_send0(ec, reg_cfp, calling, flags);
}
static VALUE
vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
    return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
}

static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = vm_ci_flag(ci);

    if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
        CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
        return vm_call_opt_send_complex(ec, reg_cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
    return vm_call_opt_send_simple(ec, reg_cfp, calling);
}
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv + 1, argv, VALUE, argc - 1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(flag & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}

static VALUE
vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
}
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
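/* Refinement dispatch: find_refinement() looks up the refinement module
 * activated for a class in the current CREF's refinement table, and
 * search_refined_method() below walks the CREF chain to decide whether a
 * refined implementation should replace the original method entry. */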
static VALUE
find_refinement(VALUE refinements, VALUE klass)
{
    if (NIL_P(refinements)) {
        return Qnil;
    }
    return rb_hash_lookup(refinements, klass);
}

static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
static const rb_callable_method_entry_t *
refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        cme = NULL;
        rb_notimplement();
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));

    if (UNDEFINED_METHOD_ENTRY_P(cme)) {
        cme = NULL;
    }

    return cme;
}
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->cd->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        return klass ? rb_callable_method_entry(klass, mid) : NULL;
    }
}

static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);

    if (ref_cme) {
        if (calling->cd->cc) {
            const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
            RB_OBJ_WRITE(cfp->iseq, &calling->cd->cc, cc);
            return vm_call_method(ec, cfp, calling);
        }
        else {
            struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
            calling->cc = ref_cc;
            return vm_call_method(ec, cfp, calling);
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
NOINLINE(static VALUE vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                                               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));

static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}

static VALUE
vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_call);

    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE procval = calling->recv;
    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
}
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->cd->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}
static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}

static VALUE
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE ret = vm_call_opt_struct_aref0(ec, calling);
    reg_cfp->sp -= 1;
    return ret;
}

static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}

static VALUE
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
    reg_cfp->sp -= 2;
    return ret;
}
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }

static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aref(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        rb_check_arity(calling->argc, 1, 1);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aset(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
            CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
            return vm_call_iseq_fwd_setup(ec, cfp, calling);
        }
        else {
            CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
            return vm_call_iseq_setup(ec, cfp, calling);
        }

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci, 1);

        rb_check_arity(calling->argc, 1, 1);

        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->cd->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        if (UNLIKELY(calling->heap_argv)) {
            vm_raise_method_missing(ec, RARRAY_LENINT(calling->heap_argv),
                                    RARRAY_CONST_PTR(calling->heap_argv), calling->recv, stat);
        }
        else {
            rb_control_frame_t *reg_cfp = cfp;
            VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
            vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
        }
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}

static inline VALUE
vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
{
    VALUE defined_class = me->defined_class;
    VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
    return NIL_P(refined_class) ? defined_class : refined_class;
}
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caller is protected: call without caching a fastpath */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}

void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}

static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    /* this check is required to distinguish this from other functions. */
    if (ec == NULL) rb_bug("unreachable");

    /* vm_search_super_method()'s result should be used. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
static inline VALUE
vm_search_normal_superclass(VALUE klass)
{
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
        RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
        FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}

NORETURN(static void vm_super_outside(void));

static void
vm_super_outside(void)
{
    rb_raise(rb_eRuntimeError, "super called outside of method");
}

static const struct rb_callcache *
empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
}
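/* vm_search_super_method resolves super: it rewrites the call info so the
 * lookup uses the enclosing method's original id, finds the superclass of
 * the defining class, and caches the result.  Method types whose cached
 * fastpath would be incorrect across a super boundary (refined, attrset,
 * ivar) are pinned to the slow vm_call_super_method handler. */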
static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = vm_defined_class_for_protected_call(me);

    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
            rb_raise(rb_eTypeError,
                     "self has wrong type to call super in this context: "
                     "%"PRIsVALUE" (expected %"PRIsVALUE")",
                     rb_obj_class(recv), m);
        }
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    ID mid = me->def->original_id;

    if (!vm_ci_markable(cd->ci)) {
        VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
    }
    else {
        /* update the call info with the method's original id */
        cd->ci = vm_ci_new_runtime(mid,
                                   vm_ci_flag(cd->ci),
                                   vm_ci_argc(cd->ci),
                                   vm_ci_kwarg(cd->ci));
    }

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of a module */
        const struct rb_callcache *cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
        return cc;
    }
    else {
        const struct rb_callcache *cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
        const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

        if (cached_cme == NULL) {
            /* empty_cc_for_super is not markable */
            cd->cc = empty_cc_for_super();
        }
        else if (cached_cme->called_id != mid) {
            const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
            if (cme) {
                cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
                RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
            }
            else {
                cd->cc = cc = empty_cc_for_super();
            }
        }
        else {
            switch (cached_cme->def->type) {
              /* vm_call_refined assumes cc->call is vm_call_super_method on invokesuper */
              case VM_METHOD_TYPE_REFINED:
              /* cc->klass is the superclass of the receiver's class, which is not
               * enough to safely reuse the attr fastpaths. */
              case VM_METHOD_TYPE_ATTRSET:
              case VM_METHOD_TYPE_IVAR:
                vm_cc_call_set(cc, vm_call_super_method); /* invalidate fastpath */
                break;
              default:
                break; /* use fastpath */
            }
        }

        VM_ASSERT((vm_cc_cme(cc), true));
        return cc;
    }
}
static inline int
block_proc_is_lambda(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->is_lambda;
    }
    else {
        return 0;
    }
}

static VALUE
vm_yield_with_cfunc(rb_execution_context_t *ec,
                    const struct rb_captured_block *captured,
                    VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
                    const rb_callable_method_entry_t *me)
{
    int is_lambda = FALSE; /* TODO */
    VALUE val, arg, blockarg;
    int frame_flag;
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    if (is_lambda) {
        arg = rb_ary_new4(argc, argv);
    }
    else if (argc == 0) {
        arg = Qnil;
    }
    else {
        arg = argv[0];
    }

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
    if (kw_splat) {
        frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    }

    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  frame_flag,
                  self,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;
}

VALUE
rb_vm_yield_with_cfunc(rb_execution_context_t *ec, const struct rb_captured_block *captured, int argc, const VALUE *argv)
{
    return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
}

static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}
static inline int
vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
{
    int i;
    long len = RARRAY_LEN(ary);

    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);

    for (i = 0; i < len && i < ISEQ_BODY(iseq)->param.lead_num; i++) {
        argv[i] = RARRAY_AREF(ary, i);
    }

    return i;
}

static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);

    /* rb_check_array_type() may call #to_ary, but must not clobber the slot */
    VM_ASSERT(argv[0] == arg0);

    return ary;
}
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci,
                          const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i = calling->argc; i < ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill the rest with nil */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
    calling->recv = Qundef;
    calling->heap_argv = 0;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;
    VALUE * const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
    int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  frame_flag,
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
        flags = 0;
        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0
            if (RARRAY_LEN(calling->heap_argv) < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
#endif
            calling->recv = rb_ary_shift(calling->heap_argv);
            /* adjust the stack to avoid a cfp consistency error */
            reg_cfp->sp++;
            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;
        }
        else {
            if (calling->argc < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
            calling->recv = TOPN(--calling->argc);
        }
        if (calling->kw_splat) {
            flags |= VM_CALL_KW_SPLAT;
        }
    }
    else {
        if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
        }
        calling->recv = TOPN(--calling->argc);
    }

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
}
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling),
                              calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc),
                              calling->kw_splat, calling->block_handler, NULL);
    POPN(argc);
    return val;
}
static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}

static VALUE
vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);
    }

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}

static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_callinfo *ci,
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                  struct rb_calling_info *calling, const struct rb_callinfo *ci,
                  bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    const rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}

static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}

static VALUE
vm_once_clear(VALUE data)
{
    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
    is->once.running_thread = NULL;
    return Qnil;
}
    VALUE args[2];
    args[0] = obj;
    args[1] = Qfalse;
    VALUE r = rb_check_funcall(v, idRespond_to_missing, 2, args);
    if (!UNDEF_P(r) && RTEST(r)) {
        return true;
    }
    else {
        return false;
    }
}
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
      case DEFINED_METHOD: {
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
              case METHOD_VISI_PUBLIC:
                return true;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER: {
        const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

        if (me) {
            VALUE klass = vm_search_normal_superclass(me->defined_class);
            if (!klass) return false;

            ID id = me->def->original_id;

            return rb_method_boundp(klass, id, 0);
        }
        break;
      }
      case DEFINED_REF:
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}

bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
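/* vm_get_ep walks lv links up the environment-pointer chain, e.g.
 * vm_get_ep(ep, 2) == GET_PREV_EP(GET_PREV_EP(ep)). */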
static const VALUE *
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
{
    rb_num_t i;
    const VALUE *ep = reg_ep;
    for (i = 0; i < lv; i++) {
        ep = GET_PREV_EP(ep);
    }
    return ep;
}
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}

VALUE
rb_vm_get_special_object(const VALUE *reg_ep, enum vm_special_object_type type)
{
    return vm_get_special_object(reg_ep, type);
}
static VALUE
vm_concat_array(VALUE ary1, VALUE ary2st)
{
    const VALUE ary2 = ary2st;
    VALUE tmp1 = rb_check_to_array(ary1);
    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp1)) {
        tmp1 = rb_ary_new3(1, ary1);
    }
    if (tmp1 == ary1) {
        tmp1 = rb_ary_dup(ary1);
    }

    if (NIL_P(tmp2)) {
        return rb_ary_push(tmp1, ary2);
    }
    else {
        return rb_ary_concat(tmp1, tmp2);
    }
}

static VALUE
vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    /* ary1 must be a newly created array */
    const VALUE ary2 = ary2st;

    if (NIL_P(ary2)) return ary1;

    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp2)) {
        return rb_ary_push(ary1, ary2);
    }
    else {
        return rb_ary_concat(ary1, tmp2);
    }
}

VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}

VALUE
rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_to_array(ary1, ary2st);
}

static VALUE
vm_splat_array(VALUE flag, VALUE ary)
{
    VALUE tmp = rb_check_to_array(ary);
    if (NIL_P(tmp)) {
        return rb_ary_new3(1, ary);
    }
    else if (RTEST(flag)) {
        return rb_ary_dup(tmp);
    }
    else {
        return tmp;
    }
}

VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}
static VALUE
vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        long i;
        const long n = RARRAY_LEN(pattern);

        for (i = 0; i < n; i++) {
            VALUE v = RARRAY_AREF(pattern, i);
            VALUE c = check_match(ec, v, target, type);

            if (RTEST(c)) {
                return c;
            }
        }
        return Qfalse;
    }
    else {
        return check_match(ec, pattern, target, type);
    }
}

VALUE
rb_vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    return vm_check_match(ec, target, pattern, flag);
}
static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
    }
    return Qtrue;
}
static void
vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
{
    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

        switch (flag) {
          case RUBY_EVENT_CALL:
            RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_CALL:
            RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_RETURN:
            RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_RETURN:
            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
            return;
        }
    }
}
static VALUE
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{
    if (!rb_const_defined_at(cbase, id)) {
        return 0;
    }
    else if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
    }
    else {
        return rb_const_get_at(cbase, id);
    }
}

static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}

static VALUE
vm_check_if_module(ID id, VALUE mod)
{
    if (!RB_TYPE_P(mod, T_MODULE)) {
        return 0;
    }
    else {
        return mod;
    }
}

static VALUE
vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    /* new class declaration */
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
    VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
    rb_define_alloc_func(c, rb_get_alloc_func(c));
    rb_class_inherited(s, c);
    return c;
}

static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}

NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
}
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    rb_autoload_load(cbase, id);

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}

static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}

static VALUE
vm_find_or_create_class_by_id(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}

static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return FALSE;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
}

static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    /* Estimate max_iv_count based on the ivar sets in the initialize method */
    if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS)) {
        RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}
enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};

static inline VALUE
vm_sendish(struct rb_execution_context_struct *ec,
           struct rb_control_frame_struct *reg_cfp,
           struct rb_call_data *cd,
           VALUE block_handler,
           enum method_explorer_type method_explorer)
{
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .cd = cd,
    };

    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
    return val;
}
VALUE
rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_sendforward(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    struct rb_forwarding_call_data adjusted_cd;
    struct rb_callinfo adjusted_ci;

    VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);

    VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);

    if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
        RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
    }

    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_invokesuperforward(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    struct rb_forwarding_call_data adjusted_cd;
    struct rb_callinfo adjusted_ci;

    VALUE bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);

    VALUE val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);

    if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
        RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
    }

    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
    VM_EXEC(ec, val);
    return val;
}
    switch (TYPE(recv)) {
      case T_SYMBOL:
        if (check_method_basic_definition(cme)) {
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(cme, rb_mod_to_s)) {
            VALUE val = rb_mod_to_s(recv);
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(cme, rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(cme, rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(cme, rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(cme, rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}

VALUE
rb_vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
{
    return vm_objtostring(iseq, recv, cd);
}
static VALUE
vm_opt_ary_freeze(VALUE ary, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return ary;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_hash_freeze(VALUE hash, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return hash;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        return str;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};

        /* duparray */
        RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
        VALUE dupary = rb_ary_resurrect(ary);

        return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    return vm_opt_duparray_include_p(ec, ary, target);
}
static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, num, ptr);
}

static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}

static VALUE
vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_hash_values(num, ptr);
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idHash, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_hash(ec, num, ptr);
}

static VALUE
vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary = {RBASIC_INIT};
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    return vm_opt_newarray_include_p(ec, num, ptr, target);
}

static VALUE
vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary = {RBASIC_INIT};
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
    }
    else {
        /* rebuild the keyword hash that the opt_newarray_send insn dropped */
        VALUE args[2];
        args[0] = fmt;
        int kw_splat = RB_NO_KEYWORDS;
        int argc = 1;

        if (!UNDEF_P(buffer)) {
            args[1] = rb_hash_new_with_size(1);
            rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
            kw_splat = RB_PASS_KEYWORDS;
            argc++;
        }

        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
    }
}

VALUE
rb_vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
}

VALUE
rb_vm_opt_newarray_pack(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
}
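/* Inline constant caches: each getconstant-path site owns an IC.  The VM
 * keeps a per-ID registry of ICs so that redefining a constant can
 * invalidate exactly the caches whose paths mention that ID. */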
static void
vm_track_constant_cache(ID id, void *ic)
{
    rb_vm_t *vm = GET_VM();
    struct rb_id_table *const_cache = vm->constant_cache;
    VALUE lookup_result;
    set_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (set_table *)lookup_result;
    }
    else {
        ics = set_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    /* Remember the ID being inserted so that a GC triggered by the insert
     * below cannot free this table out from under us. */
    vm->inserting_constant_cache_id = id;

    set_insert(ics, (st_data_t)ic);

    vm->inserting_constant_cache_id = (ID)0;
}

static void
vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
{
    for (int i = 0; segments[i]; i++) {
        ID id = segments[i];
        if (id == idNULL) continue;
        vm_track_constant_cache(id, ic);
    }
}
static bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || /* no need to check CREF */
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}

static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}

/* YJIT needs this function to never allocate and never raise */
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}

static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice =
        IMEMO_NEW(struct iseq_inline_constant_cache_entry, imemo_constcache, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);

    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
}

VALUE
rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, IC ic)
{
    VALUE val;
    const ID *segments = ic->segments;
    struct iseq_inline_constant_cache_entry *ice = ic->entry;
    if (ice && vm_ic_hit_p(ice, GET_EP())) {
        val = ice->value;

        VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
    }
    else {
        ruby_vm_constant_cache_misses++;
        val = vm_get_ev_const_chain(ec, segments);
        vm_ic_track_const_chain(GET_CFP(), ic, segments);
        /* Undo the PC increment to get the address of this instruction:
         * INSN_ATTR(width) == 2 in this case. */
        vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
    }
    return val;
}
    if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
        return is->once.value;
    }
    else if (is->once.running_thread == NULL) {
        VALUE val;
        is->once.running_thread = th;
        val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
        RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
        /* the is->once.running_thread field is cleared by vm_once_clear()
         * on exceptions; mark success here. */
        is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
        return val;
    }
    else if (is->once.running_thread == th) {
        /* recursive once */
        return vm_once_exec((VALUE)iseq);
    }
    else {
        /* waiting for another thread to finish */
        RUBY_VM_CHECK_INTS(ec);
        rb_thread_schedule();
        goto again;
    }
}
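/* vm_case_dispatch implements case/when over a precompiled CDHASH: if the
 * key's class still has an unredefined #===, the branch offset is looked
 * up directly; a Float key with an integral value is normalized to an
 * Integer first so that `when 1` matches 1.0. */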
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1:
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
static void
vm_stack_consistency_error(const rb_execution_context_t *ec,
                           const rb_control_frame_t *cfp,
                           const VALUE *bp)
{
    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    rb_str_cat_cstr(mesg, "\n");
    rb_str_append(mesg, rb_inspect(rb_vm_backtrace_object()));
    rb_exc_fatal(rb_exc_new3(rb_eFatal, mesg));
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
}
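/* The vm_opt_* helpers below are the interpreter's arithmetic and
 * comparison fast paths.  Each checks operand types and the matching
 * BASIC_OP_UNREDEFINED_P flag, and returns Qundef to fall back to a full
 * method call when either check fails.  For example, `1 + 2` stays on
 * vm_opt_plus's fixnum branch unless some code redefines Integer#+,
 * which clears the BOP_PLUS/INTEGER_REDEFINED_OP_FLAG combination. */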
static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString && RBASIC_CLASS(obj) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray && RBASIC_CLASS(obj) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }

    return Qundef;
}

static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_gt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ge(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RBASIC_CLASS(obj) == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ltlt(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        else {
            return rb_str_concat(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_and(VALUE recv, VALUE obj)
{
    /* If both operands are fixnums, the bottom tag bit is 1 on both, so
     * the bitwise AND of the raw VALUEs is itself a valid fixnum. */
    VALUE ret = ((SIGNED_VALUE)recv) & ((SIGNED_VALUE)obj);

    if (FIXNUM_P(ret) &&
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        return ret;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_or(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
        return recv | obj;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aref(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        if (FIXNUM_2_P(recv, obj) &&
            BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
            return rb_fix_aref(recv, obj);
        }
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
        if (FIXNUM_P(obj)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
        }
        else {
            return rb_ary_aref1(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
        return rb_hash_aref(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
             FIXNUM_P(obj)) {
        rb_ary_store(recv, FIX2LONG(obj), set);
        return set;
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
        rb_hash_aset(recv, obj, set);
        return set;
    }
    else {
        return Qundef;
    }
}
7043vm_opt_length(
VALUE recv,
int bop)
7049 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
7050 if (bop == BOP_EMPTY_P) {
7051 return LONG2NUM(RSTRING_LEN(recv));
7058 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
7062 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
7071vm_opt_empty_p(
VALUE recv)
7073 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
7086 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
7089 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* ~0UL == INT2FIX(-1); (-1).succ is of course 0 */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* INT2FIX(FIXNUM_MAX): the successor no longer fits in a fixnum */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        return x + 2;
    }
}
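/*
 * Why `x + 2` works in the default branch: with INT2FIX(n) == 2n + 1,
 * INT2FIX(n) + 2 == 2(n + 1) + 1 == INT2FIX(n + 1).  The two case labels
 * handle the ends of the range: INT2FIX(-1) is the all-ones word (where
 * "+ 2" would wrap around), and INT2FIX(FIXNUM_MAX) must promote to a
 * Bignum via rb_uint2big.
 */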
static VALUE
vm_opt_succ(VALUE recv)
{
    if (FIXNUM_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
        return fix_succ(recv);
    }
    else if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_succ(recv);
    }
    else {
        return Qundef;
    }
}
static VALUE
vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
        return RBOOL(!RTEST(recv));
    }
    else {
        return Qundef;
    }
}
static VALUE
vm_opt_regexpmatch2(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             CLASS_OF(obj) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
        /* String#=~ Regexp swaps the operands */
        return rb_reg_match(obj, recv);
    }
    else if (RBASIC_CLASS(recv) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
        return rb_reg_match(recv, obj);
    }
    else {
        return Qundef;
    }
}
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0, val, 0);
        reg_cfp->pc--;
    }

    /* reload here: the global hook above may add or free local hooks */
    rb_hook_list_t *local_hooks = *local_hooks_ptr;
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0, val, 0);
            reg_cfp->pc--;
        }
    }
}
#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
    } \
} while (0)
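/*
 * The double mask reads: of the events this instruction can fire
 * (pc_events), restricted to the one being asked for (target_event), is
 * anything actually enabled (enabled_flags)?  Only then is the
 * comparatively expensive vm_trace_hook() call made, which re-checks the
 * global and local hook lists individually.
 */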
static VALUE
rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
    VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
    return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
}
static void
vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
{
    const VALUE *pc = reg_cfp->pc;
    rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
    rb_event_flag_t global_events = enabled_flags;

    if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
        return;
    }
    else {
        const rb_iseq_t *iseq = reg_cfp->iseq;
        VALUE iseq_val = (VALUE)iseq;
        size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
        rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
        rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
        rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
        rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
        rb_hook_list_t *bmethod_local_hooks = NULL;
        rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
        rb_event_flag_t bmethod_local_events = 0;
        const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
        enabled_flags |= iseq_local_events;

        VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);

        if (bmethod_frame) {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
            VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
            bmethod_local_hooks = me->def->body.bmethod.hooks;
            bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
            if (bmethod_local_hooks) {
                bmethod_local_events = bmethod_local_hooks->events;
            }
        }

        if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
#if 0
            /* disable trace */
            rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
#else
            /* do not disable trace here: re-enabling it later is costly */
#endif
            return;
        }
        else if (ec->trace_arg != NULL) {
            /* already tracing */
            return;
        }
        else {
            rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
            /* iseq-local events are not mixed in here, since the same iseq
             * can back multiple bmethods */
            rb_event_flag_t bmethod_events = global_events | bmethod_local_events;

            if (0) {
                ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
                                  (int)pos, (int)pc_events,
                                  RSTRING_PTR(rb_iseq_path(iseq)),
                                  (int)rb_iseq_line_no(iseq, pos),
                                  RSTRING_PTR(rb_iseq_label(iseq)));
            }
            VM_ASSERT(reg_cfp->pc == pc);
            VM_ASSERT(pc_events != 0);

            /* a bmethod's b_call doubles as its :call event */
            if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks_ptr, Qundef);
            }

            VM_TRACE_HOOK(RUBY_EVENT_CLASS | RUBY_EVENT_CALL | RUBY_EVENT_B_CALL, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_RESCUE, rescue_errinfo(ec, reg_cfp));
            VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));

            /* a bmethod's b_return doubles as its :return event */
            if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks_ptr, TOPN(0));
            }

            /* keep the iseq alive: local_hooks_ptr points into its slot */
            RB_GC_GUARD(iseq_val);
        }
    }
}
#if VM_CHECK_MODE > 0
NORETURN( NOINLINE( COLDFUNC
void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ our PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01; /* make it a valid VALUE (Fixnum) */
    vm_stack_canary_was_born = true;
    VM_ASSERT(n == 0);
}

void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* the canary is already dead, so no need to be careful here */
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}

#else
void Init_vm_stack_canary(void) { /* nothing to do */ }
#endif
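/*
 * Why `vm_stack_canary |= 0x01` above: setting the low tag bit makes the
 * canary word look like an immediate Fixnum, so even if it is ever read
 * back as a live VALUE it will never be dereferenced or marked by the GC.
 * SETUP_CANARY/CHECK_CANARY (see invoke_bf below) plant this word in an
 * unused stack slot and verify that a "leaf" builtin left it untouched.
 */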
/* builtin_invoker0 .. builtin_invoker15 are mechanically generated:
 * invoker N casts funcptr to an N-argument function pointer type and
 * spreads argv[0 .. N-1] into the call. */

static VALUE
builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
}

static VALUE
builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
}

static VALUE
builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
}

static VALUE
builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
}

static VALUE
builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
static builtin_invoker
lookup_builtin_invoker(int argc)
{
    static const builtin_invoker invokers[] = {
        builtin_invoker0,  builtin_invoker1,  builtin_invoker2,  builtin_invoker3,
        builtin_invoker4,  builtin_invoker5,  builtin_invoker6,  builtin_invoker7,
        builtin_invoker8,  builtin_invoker9,  builtin_invoker10, builtin_invoker11,
        builtin_invoker12, builtin_invoker13, builtin_invoker14, builtin_invoker15,
    };
    return invokers[argc];
}
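/*
 * Dispatch sketch: for a builtin with bf->argc == 2,
 *   (*lookup_builtin_invoker(2))(ec, self, argv, fp)
 * behaves like fp(ec, self, argv[0], argv[1]).  argc is the builtin's
 * fixed arity and is expected to stay within 0..15 (the table size), so
 * no bounds check is done here.
 */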
static inline VALUE
invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function *bf, const VALUE *argv)
{
    /* verify the assumption behind `Primitive.attr! :leaf` */
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return ret;
}

static VALUE
vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, const VALUE *argv)
{
    return invoke_bf(ec, cfp, bf, argv);
}
static VALUE
vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
{
    if (0) { /* debug print */
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i=0; i<bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        }
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
                          (void *)(uintptr_t)bf->func_ptr);
    }

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    }
    else {
        const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
        return invoke_bf(ec, cfp, bf, argv);
    }
}
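/*
 * How the argv computation above works: locals live just below the
 * environment pointer, with VM_ENV_DATA_SIZE bookkeeping slots on top:
 *
 *   ep[-(local_table_size + VM_ENV_DATA_SIZE - 1)]  -> local #0
 *   ...
 *   ep[-VM_ENV_DATA_SIZE]                           -> last local
 *
 * so ep - local_table_size - VM_ENV_DATA_SIZE + 1 points at local #0,
 * and adding start_index skips the leading locals that are not being
 * delegated to the builtin.
 */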
/* for __builtin_inline!() */

VALUE
rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
{
    const rb_control_frame_t *cfp = ec->cfp;
    return cfp->ep[index];
}