#include "ruby/internal/config.h"

#ifdef HAVE_STDATOMIC_H
  #include <stdatomic.h>
#endif

#include "debug_counter.h"

#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/struct.h"

#include "insns_info.inc"
                                         int argc, const VALUE *argv, int priv);
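/* Pre-allocated special exceptions (e.g. the SystemStackError raised on
 * stack overflow) are shared objects, so they are copied below before a
 * backtrace is attached to any particular raise. */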
ruby_vm_special_exception_copy(VALUE exc)
    rb_obj_copy_ivar(e, exc);

    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
    EC_JUMP_TAG(ec, TAG_RAISE);

NORETURN(static void vm_stackoverflow(void));
    ec_stack_overflow(GET_EC(), TRUE);

        rb_bug("system stack overflow during GC. Faulty native extension?");
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    ec_stack_overflow(ec, crit == 0);
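/* The helpers below (callable_class_p and friends) are consistency checks
 * compiled in only when VM_CHECK_MODE is enabled; they validate classes and
 * method entries before frames are pushed. */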
callable_class_p(VALUE klass)
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;

    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));
    if (callable_class_p(cme->defined_class)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env;
        cref_or_me_type = imemo_type(cref_or_me);
    if (type & VM_FRAME_FLAG_BMETHOD) {

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);

        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
            if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);

    if (cref_or_me_type == imemo_ment) {
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq));
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));

    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \

    switch (given_magic) {
      CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;
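/* Stack canary: a random VALUE is planted just above the stack pointer when
 * a frame is pushed; if an instruction clobbers it, rb_vm_check_canary()
 * disassembles the iseq to report which instruction is at fault. */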
    unsigned int pos = 0;
    while (pos < ISEQ_BODY(iseq)->iseq_size) {
        int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {
    rb_bug("failed to find the previous insn");

    if (! LIKELY(vm_stack_canary_was_born)) {
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
    else if (! (iseq = GET_ISEQ())) {
    else if (LIKELY(sp[0] != vm_stack_canary)) {

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR "\n"
        "watch out the C stack trace.\n"
        name, stri, pos, strd);
    rb_bug("see above.");

#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
vm_push_frame_debug_counter_inc(
    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);
            RB_DEBUG_COUNTER_INC(frame_R2R);
            RB_DEBUG_COUNTER_INC(frame_R2C);
            RB_DEBUG_COUNTER_INC(frame_C2R);
            RB_DEBUG_COUNTER_INC(frame_C2C);

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    rb_bug("unreachable");

#define vm_push_frame_debug_counter_inc(ec, cfp, t)

rb_vm_stack_canary(void)
    return vm_stack_canary;
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
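/* As the assertions above encode, the three env-data slots live directly
 * below the ep: ep[-2] holds the method entry or CREF, ep[-1] the special
 * value (block handler or previous ep), and ep[0] the frame flags. */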
    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    for (int i=0; i < local_size; i++) {

#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);

    vm_push_frame_debug_counter_inc(ec, cfp, type);

    if (VMDEBUG == 2) SDR();
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (VMDEBUG == 2) SDR();
    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
    return flags & VM_FRAME_FLAG_FINISH;

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);

    rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);
                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
                  VM_BLOCK_HANDLER_NONE,
    return (VALUE)dmy_iseq;
rb_arity_error_new(int argc, int min, int max)
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
        rb_str_catf(err_mess, "..%d", max);

rb_error_arity(int argc, int min, int max)
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
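/* vm_env_write: the fast path does a plain store while the env still lives
 * on the VM stack; once VM_ENV_FLAG_WB_REQUIRED is set (the env escaped to
 * the heap), the slow path above re-registers the env with the GC. */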
vm_env_write(const VALUE *ep, int index, VALUE v)
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
        vm_env_write_slowpath(ep, index, v);

rb_vm_env_write(const VALUE *ep, int index, VALUE v)
    vm_env_write(ep, index, v);
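/* Convert a block handler into a Proc: iseq/ifunc blocks are materialized
 * through rb_vm_make_proc, symbols go through Symbol#to_proc, and existing
 * procs are returned as-is. */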
    if (block_handler == VM_BLOCK_HANDLER_NONE) {

        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
            VM_UNREACHABLE(rb_vm_bh_to_procval);
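/* Special variables ($~, $_ and flip-flop state) are kept in a vm_svar imemo
 * reachable from the local ep, falling back to ec->root_svar at top level. */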
vm_svar_valid_p(VALUE svar)
    switch (imemo_type(svar)) {
        rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
        svar = ec->root_svar;
    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    VM_ASSERT(vm_svar_valid_p((VALUE)svar));
    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);

    const struct vm_svar *svar = lep_svar(ec, lep);
      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;
            const VALUE ary = svar->others;
            return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);

    struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);

    struct vm_svar *svar = lep_svar(ec, lep);
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
      case VM_SVAR_LASTLINE:
      case VM_SVAR_BACKREF:
            VALUE ary = svar->others;
            rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);

        val = lep_svar_get(ec, lep, key);
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
            rb_bug("unexpected back-ref");

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);
        return rb_reg_last_defined(backref);
        rb_bug("unexpected back-ref");
        nth = (int)(type >> 1);
check_method_entry(VALUE obj, int can_be_svar)
    if (obj == Qfalse) return NULL;
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {
        rb_bug("check_method_entry: svar should not be there:");

    const VALUE *ep = cfp->ep;
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;
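/* CREF (lexical scope) lookup mirrors the method-entry lookup above: walk
 * the env chain outward until a cref (or svar-wrapped cref) is found. */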
#if VM_CHECK_MODE == 0

check_cref(VALUE obj, int can_be_svar)
    if (obj == Qfalse) return NULL;
    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {
        rb_bug("check_method_entry: svar should not be there:");

vm_env_cref(const VALUE *ep)
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
is_cref(const VALUE v, int can_be_svar)
    switch (imemo_type(v)) {

vm_env_cref_by_cref(const VALUE *ep)
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
    const VALUE v = *vptr;

        switch (imemo_type(v)) {
            new_cref = vm_cref_dup(cref);
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");

vm_cref_replace_with_duplicated_cref(const VALUE *ep)
    if (vm_env_cref_by_cref(ep)) {
        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
            ep = VM_ENV_PREV_EP(ep);
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
        rb_bug("vm_cref_dup: unreachable");
vm_get_cref(const VALUE *ep)
        rb_bug("vm_get_cref: unreachable");

rb_vm_get_cref(const VALUE *ep)
    return vm_get_cref(ep);

    return vm_get_cref(cfp->ep);

vm_get_const_key_cref(const VALUE *ep)
        if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            RCLASS_EXT(CREF_CLASS(cref))->cloned) {
        cref = CREF_NEXT(cref);

        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    *new_cref_ptr = NULL;

        prev_cref = vm_env_cref(ep);
            prev_cref = vm_env_cref(cfp->ep);
    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
vm_get_cbase(const VALUE *ep)
    const rb_cref_t *cref = vm_get_cref(ep);
    return CREF_CLASS_FOR_DEFINITION(cref);

vm_get_const_base(const VALUE *ep)
    const rb_cref_t *cref = vm_get_cref(ep);
        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        cref = CREF_NEXT(cref);

vm_check_if_namespace(VALUE klass)
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);

vm_ensure_not_refinement_module(VALUE self)
        rb_warn("not defined at the refinement, but at the outer class/module");
    if (NIL_P(orig_klass) && allow_nil) {
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = CREF_CLASS(cref);
            cref = CREF_NEXT(cref);
            if (!NIL_P(klass)) {
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                        if (am == klass) break;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        goto search_continue;
                        if (UNLIKELY(!rb_ractor_main_p())) {
                            rb_raise(rb_eRactorIsolationError,
                                     "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                                     rb_class_path(klass), rb_id2name(id));

        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));

        vm_check_if_namespace(orig_klass);
            return rb_public_const_defined_from(orig_klass, id);
            return rb_public_const_get_from(orig_klass, id);

    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {

    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);

        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);

    if (top_level_raise && !CREF_NEXT(cref)) {
    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc,
                                          int is_attr, attr_index_t index, shape_id_t shape_id));

fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc,
                int is_attr, attr_index_t index, shape_id_t shape_id)
        vm_cc_attr_index_set(cc, index, shape_id);
        vm_ic_attr_index_set(iseq, ic, index, shape_id);

#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)

#define ATTR_INDEX_NOT_SET (attr_index_t)-1
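/* vm_getivar: instance-variable reads are cached by shape_id. When the
 * object's current shape matches the cached shape, the cached attribute
 * index is used directly; otherwise the cache is refilled below. */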
    shape_id_t shape_id;
        return default_value;

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
        if (UNLIKELY(!rb_ractor_main_p())) {
            if (default_value == Qundef) {
        ivar_list = RCLASS_IVPTR(obj);
#if !SHAPE_IN_BASIC_FLAGS
        shape_id = RCLASS_SHAPE_ID(obj);
        rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
            shape_id = ivtbl->shape_id;
            ivar_list = ivtbl->as.shape.ivptr;
            return default_value;

    shape_id_t cached_id;
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);

    if (LIKELY(cached_id == shape_id)) {
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);
        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);
            RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
#if USE_DEBUG_COUNTER
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
            RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
                table = (st_table *)RCLASS_IVPTR(obj);
                table = ROBJECT_IV_HASH(obj);
                if (rb_gen_ivtbl_get(obj, 0, &ivtbl)) {
                    table = ivtbl->as.complex.table;
            if (!table || !st_lookup(table, id, &val)) {
                val = default_value;
            shape_id_t previous_cached_id = cached_id;
            if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {
                if (cached_id != previous_cached_id) {
                    fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
                if (index == ATTR_INDEX_NOT_SET) {
                    val = default_value;
                val = ivar_list[index];
                vm_cc_attr_index_initialize(cc, shape_id);
                vm_ic_attr_index_initialize(ic, shape_id);
            val = default_value;

    if (!UNDEF_P(default_value)) {
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);
    return rb_attr_get(obj, id);
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic,
               const struct rb_callcache *cc, bool is_attr)
    RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
        vm_cc_attr_index_set(cc, index, next_shape_id);
        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
    rb_check_frozen(obj);
        attr_index_t index = rb_obj_ivar_set(obj, id, val);
        shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
        if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
            populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
        RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);

    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);

NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
    shape_id_t shape_id = rb_generic_shape_id(obj);

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
        rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
        if (shape_id == dest_shape->parent_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {

    rb_gen_ivtbl_get(obj, 0, &ivtbl);
    if (shape_id != dest_shape_id) {
#if SHAPE_IN_BASIC_FLAGS
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
        ivtbl->shape_id = dest_shape_id;

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
        shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (LIKELY(shape_id == dest_shape_id)) {
            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
        else if (dest_shape_id != INVALID_SHAPE_ID) {
            rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);
            rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
            shape_id_t source_shape_id = dest_shape->parent_id;
            if (shape_id == source_shape_id && dest_shape->edge_name == id && shape->capacity == dest_shape->capacity) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
                ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);
                RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);

        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
        RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
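/* Class-variable caches are validated by comparing the entry's recorded
 * global cvar state and cref against the current ones; any cvar write
 * advances the global state and thereby invalidates stale entries. */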
    VALUE defined_class = 0;
        defined_class = RBASIC(defined_class)->klass;

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
        rb_bug("the cvc table should be set");
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();

    cref = vm_get_cref(GET_EP());
    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);
        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
    return update_classvariable_cache(iseq, klass, id, cref, ic);

    return vm_getclassvariable(iseq, cfp, id, ic);

    cref = vm_get_cref(GET_EP());
    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);
        rb_class_ivar_set(ic->entry->class_value, id, val);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);
    update_classvariable_cache(iseq, klass, id, cref, ic);

    vm_setclassvariable(iseq, cfp, id, val, ic);

    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);

    shape_id_t dest_shape_id;
    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
            if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    vm_setinstancevariable(iseq, obj, id, val, ic);
        ec->tag->state = RUBY_TAG_FATAL;
        ec->tag->state = TAG_THROW;
    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
        ec->tag->state = TAG_RAISE;
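/* vm_throw_start: for break/retry/return, walk outward from the current
 * frame to find the frame that should catch the non-local jump, then wrap
 * the payload in a THROW_DATA imemo carrying the escape cfp and state. */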
               const int flag, const VALUE throwobj)
    else if (state == TAG_BREAK) {
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            ep = VM_ENV_PREV_EP(ep);
            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    for (i=0; i < ct->size; i++) {
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);
                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());
        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
            ep = VM_ENV_PREV_EP(ep);

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);
            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    if (in_class_frame) {
                        const VALUE *tep = current_ep;
                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                if (tep == target_ep) {
                                    goto unexpected_return;
                            tep = VM_ENV_PREV_EP(tep);
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_MAIN:
                        if (in_class_frame) goto unexpected_return;
                        if (target_ep == NULL) {
                            goto unexpected_return;
                      case ISEQ_TYPE_EVAL: {
                        enum rb_iseq_type t = ISEQ_BODY(is)->type;
                        while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                            if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                            t = ISEQ_BODY(is)->type;
                        toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;
                      case ISEQ_TYPE_CLASS:
            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto unexpected_return;
            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
        rb_bug("isns(throw): unsupported throw type");

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);

         rb_num_t throw_state, VALUE throwobj)
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
        return vm_throw_continue(ec, throwobj);

    return vm_throw(ec, reg_cfp, throw_state, throwobj);
    int is_splat = flag & 0x01;
    const VALUE obj = ary;

    if (num + is_splat == 0) {
    else if (flag & 0x02) {
        for (i = 0; i < num - len; i++) {
        for (j = 0; i < num; i++, j++) {
            *cfp->sp++ = rb_ary_new();
            for (; i < num - len; i++) {
            for (rb_num_t j = 0; i < num; i++, j++) {
                *cfp->sp++ = ptr[len - j - 1];
            for (rb_num_t j = 0; j < num; j++) {
                *cfp->sp++ = ptr[num - j - 1];
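/* Call-cache bookkeeping: each class keeps an ID table mapping method IDs
 * to rb_class_cc_entries (ccs), with one call cache per (argc, flag)
 * variant of a call site sharing the same callable method entry. */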
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;
    ccs->entries = NULL;
    rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);

    if (! vm_cc_markable(cc)) {

    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {
            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);
    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {

#if VM_CHECK_MODE > 0
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);

    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);

        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            const int ccs_len = ccs->len;
            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));
                unsigned int argc = vm_ci_argc(ci);
                unsigned int flag = vm_ci_flag(ci);
                for (int i=0; i<ccs_len; i++) {
                    unsigned int ccs_ci_argc = ccs->entries[i].argc;
                    unsigned int ccs_ci_flag = ccs->entries[i].flag;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));
                    if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);
                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;
        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
        cme = rb_callable_method_entry(klass, mid);
    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    VM_ASSERT(cc_tbl != NULL);
    if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);

    cme = rb_check_overloaded_cme(cme, ci);
    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

        cc = vm_search_cc(klass, ci);
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
    VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
#if USE_DEBUG_COUNTER
    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    if (cd_owner && cc != empty_cc) {

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
            RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);

    return vm_search_method_slowpath0(cd_owner, cd, klass);

    VM_ASSERT(klass != Qfalse);
    return vm_search_method_fastpath(cd_owner, cd, klass);
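/* cfunc_type: the union below enumerates every supported C-method arity so
 * that function pointers can be stored and compared (see check_cfunc) in a
 * type-safe way where the transparent_union attribute is available. */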
#if __has_attribute(transparent_union)
    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
# define make_cfunc_type(f) (cfunc_type)(f)
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {
#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
        return me->def->body.cfunc.func == func;

    return me && METHOD_ENTRY_BASIC(me);

    VM_ASSERT(iseq != NULL);
    return check_cfunc(vm_cc_cme(cc), func);

#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)

opt_equality_specialized(VALUE recv, VALUE obj)
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
        goto compare_by_identity;
#if MSC_VERSION_BEFORE(1300)
        else if (isnan(b)) {
        return RBOOL(a == b);
        return rb_str_eql_internal(obj, recv);

  compare_by_identity:
    return RBOOL(recv == obj);

    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
    return RBOOL(recv == obj);

#undef EQ_UNREDEFINED_P
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));

opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {
        return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);
      case VM_CHECKMATCH_TYPE_WHEN:
      case VM_CHECKMATCH_TYPE_RESCUE:
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
      case VM_CHECKMATCH_TYPE_CASE: {
            return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
        rb_bug("check_match: unreachable");
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#define CHECK_CMP_NAN(a, b)

double_cmp_lt(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);

double_cmp_le(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);

double_cmp_gt(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);

double_cmp_ge(double a, double b)
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
static inline VALUE *
    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;
            bp += vm_ci_argc(ci);

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");

    return vm_base_ptr(cfp);
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);
    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;

rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;

rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
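/* Splat argument setup: when a splat would push more than VM_ARGC_STACK_MAX
 * values, the arguments are collected into a hidden heap array instead
 * (calling->heap_argv); the negative max_args sentinels opt into this. */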
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)

    vm_check_canary(GET_EC(), cfp->sp);
        int argc = calling->argc;

        if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {
            VALUE *argv = cfp->sp - argc;
            VALUE argv_ary = rb_ary_hidden_new(len + argc + 1);
            rb_ary_cat(argv_ary, argv, argc);
            rb_ary_cat(argv_ary, ptr, len);
            cfp->sp -= argc - 1;
            cfp->sp[-1] = argv_ary;
            calling->heap_argv = argv_ary;

        if (max_args >= 0 && len + argc > max_args) {
                calling->argc += len - (max_args - argc + 1);
                len = max_args - argc + 1;
        calling->heap_argv = 0;
        CHECK_VM_STACK_OVERFLOW(cfp, len);
        for (i = 0; i < len; i++) {
            *cfp->sp++ = ptr[i];

    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;

vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
        if (keyword_hash != Qnil) {
            keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));
    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {
        keyword_hash = rb_hash_dup(keyword_hash);
    return keyword_hash;
                 const struct rb_callinfo *restrict ci, int max_args)
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {
            VM_ASSERT(calling->kw_splat == 1);
            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);
            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;
            if (UNLIKELY(calling->heap_argv)) {
                rb_ary_push(calling->heap_argv, kwh);
                ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                    calling->kw_splat = 0;
                VM_ASSERT(calling->kw_splat == 1);
                calling->kw_splat = 0;
            VM_ASSERT(calling->kw_splat == 0);
            VALUE ary = cfp->sp[0];
            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {
            VALUE last_hash, argv_ary;
            if (UNLIKELY(argv_ary = calling->heap_argv)) {
                if (!IS_ARGS_KEYWORD(ci) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
                    rb_ary_pop(argv_ary);
                    rb_ary_push(argv_ary, rb_hash_dup(last_hash));
                    calling->kw_splat = 1;
                if (!IS_ARGS_KEYWORD(ci) &&
                    calling->argc > 0 &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
                    cfp->sp[-1] = rb_hash_dup(last_hash);
                    calling->kw_splat = 1;
    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {
        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);
            calling->kw_splat = 0;
    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {
        VM_ASSERT(calling->kw_splat == 0);
        vm_caller_setup_arg_kw(cfp, calling, ci);
#define USE_OPT_HIST 0

#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

opt_hist_show_results_at_exit(void)
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[OPT_HIST_MAX]++;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[OPT_HIST_MAX]++;
    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
                         VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                         VALUE *const locals);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);
    cfp->sp[0] = (VALUE)calling->cd->ci;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;
    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    cfp->sp -= (calling->argc + 1);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
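/* warn_unused_block: emits the "block may be ignored" warning at most once
 * per call site, deduplicated through a VM-wide st_table keyed on the
 * method's original ID, or in strict mode a hash mixed from the method
 * definition and the call-site pc. */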
    st_table *dup_check_table = vm->unused_block_warning_table;
        .v = (VALUE)cme->def,

    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (st_lookup(dup_check_table, key, NULL)) {
            key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);

        fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
        fprintf(stderr, "key:%p\n", (void *)key);

    if (st_insert(dup_check_table, key, 1)) {

    VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);
    if (!NIL_P(m_loc)) {
        rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",
        rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);
            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, calling->argc, lead_num, lead_num);

            VM_ASSERT(cc == calling->cc);
            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&
                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
        else if (rb_iseq_only_optparam_p(iseq)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;
            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));
            else if (argc == lead_num) {
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);
                if (klocals[kw_param->num] == INT2FIX(0)) {
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));

    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(
                ci = forward_cd->caller_ci;
            can_fastpath = false;
        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(
            can_fastpath = false;
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
    const VALUE *lep = VM_CF_LEP(cfp);

    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {
        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;

    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;
    const VALUE *from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
    VALUE *to = cfp->sp - 1;
        CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));
    CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);
    cfp->sp = to + argc;
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);
    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);
    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);
    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
                      int opt_pc, int param_size, int local_size)
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);

                           int opt_pc, int param_size, int local_size)
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);

    VALUE *argv = cfp->sp - calling->argc;
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;
    sp[0] = calling->recv;
    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);
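/* Invokers for C-defined methods, one per arity (up to 15 arguments). The
 * ractor_unsafe_check() variants guard methods that may only run on the
 * main ractor; the plain variants further below skip that check. */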
ractor_unsafe_check(void)
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(argc, argv, recv);

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(recv, argv[0]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3509 ractor_unsafe_check();
3510 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3511 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3517 ractor_unsafe_check();
3518 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3519 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3525 ractor_unsafe_check();
3526 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3527 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3533 ractor_unsafe_check();
3534 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3535 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3541 ractor_unsafe_check();
3542 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3543 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3549 ractor_unsafe_check();
3550 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3551 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3557 ractor_unsafe_check();
3558 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3559 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3565 ractor_unsafe_check();
3566 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3567 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3573 ractor_unsafe_check();
3574 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3575 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3581 ractor_unsafe_check();
3582 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3583 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3589 ractor_unsafe_check();
3590 VALUE(*f)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE) = (
VALUE(*)(
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE,
VALUE))func;
3591 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
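/*
 * The ractor_safe_call_cfunc_<arity> variants below are identical to the
 * call_cfunc_<arity> trampolines above except that they omit
 * ractor_unsafe_check(); they are installed as invokers for C methods that
 * have been marked Ractor-safe.
 */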
static VALUE
ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, rb_ary_new4(argc, argv));
}

static VALUE
ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
    return (*f)(argc, argv, recv);
}

static VALUE
ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
    return (*f)(recv);
}

static VALUE
ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
    return (*f)(recv, argv[0]);
}

static VALUE
ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1]);
}

static VALUE
ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2]);
}

static VALUE
ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}

static VALUE
ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
{
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
}
static inline int
vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
{
    const int ov_flags = RAISED_STACKOVERFLOW;
    if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
    if (rb_ec_raised_p(ec, ov_flags)) {
        rb_ec_raised_reset(ec, ov_flags);
        return TRUE;
    }
    return FALSE;
}

#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))

static inline const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(ALIAS);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
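/*
 * Calling a C-implemented method: push a CFUNC frame, fire the C_CALL
 * hooks, run the function through its arity-specific invoker, verify that
 * the callee left the control frame pointer where we expect it, then pop
 * the frame and fire C_RETURN.
 */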
static VALUE
vm_call_cfunc_with_frame_(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                          int argc, VALUE *argv, VALUE *stack_bottom)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    VM_ASSERT(reg_cfp == ec->cfp);

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    int len = cfunc->argc;
    if (len >= 0) rb_check_arity(argc, len, len);

    reg_cfp->sp = stack_bottom;
    val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    VM_ASSERT(ec->cfp->sp == stack_bottom);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
// Push a C method frame for a given cme. This is called when JIT code skipped
// pushing a frame but the C method reached a point where a frame is needed.
void
rb_vm_push_cfunc_frame(const rb_callable_method_entry_t *cme, int recv_idx)
{
    VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);

    rb_execution_context_t *ec = GET_EC();
    VALUE *sp = ec->cfp->sp;
    VALUE recv = *(sp - recv_idx - 1);
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    VALUE block_handler = VM_BLOCK_HANDLER_NONE;
#if VM_CHECK_MODE > 0
    // Clean up the stack canary since we are satisfying the "leaf or lazy push" assumption
    *(GET_EC()->cfp->sp) = Qfalse;
#endif
    vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
}

// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. cannot be skipped in fastpath)
bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}

static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    int argc = calling->argc;
    VALUE *stack_bottom = reg_cfp->sp - argc - 1;
    VALUE *argv = &stack_bottom[1];

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
}
static VALUE
vm_call_cfunc_other(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc_other);

    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    VALUE argv_ary;
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        VM_ASSERT(!IS_ARGS_KEYWORD(ci));
        int argc = RARRAY_LENINT(argv_ary);
        VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
        VALUE *stack_bottom = reg_cfp->sp - 2;

        VM_ASSERT(calling->argc == 1);

        return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
    }
    else {
        CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));

        return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
    }
}

static VALUE
vm_call_cfunc_array_argv(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int stack_offset, int argc_offset)
{
    VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
    int argc = RARRAY_LENINT(argv_ary) - argc_offset;

    if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
        return vm_call_cfunc_other(ec, reg_cfp, calling);
    }

    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    calling->kw_splat = 0;
    int i;
    VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
    VALUE *sp = stack_bottom;
    CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
    for(i = 0; i < argc; i++) {
        *++sp = argv[i];
    }
    reg_cfp->sp = sp+1;

    return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
}

static VALUE
vm_call_cfunc_only_splat(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
    VALUE argv_ary = reg_cfp->sp[-1];
    int argc = RARRAY_LENINT(argv_ary);
    VALUE *argv = (VALUE *)RARRAY_CONST_PTR(argv_ary);
    VALUE last_hash;
    int argc_offset = 0;

    if (UNLIKELY(argc > 0 &&
                 RB_TYPE_P((last_hash = argv[argc-1]), T_HASH) &&
                 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
        if (!RHASH_EMPTY_P(last_hash)) {
            return vm_call_cfunc_other(ec, reg_cfp, calling);
        }
        argc_offset++;
    }
    return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
}

static VALUE
vm_call_cfunc_only_splat_kw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
    VALUE keyword_hash = reg_cfp->sp[-1];

    if (RB_TYPE_P(keyword_hash, T_HASH) && RHASH_EMPTY_P(keyword_hash)) {
        return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
    }

    return vm_call_cfunc_other(ec, reg_cfp, calling);
}

static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
            // f(*a)
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
            return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
        }
        if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
            // f(*a, **kw)
            CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
            return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
        }
    }

    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
    return vm_call_cfunc_other(ec, reg_cfp, calling);
}
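/*
 * Attribute accessor fast paths: attr_reader is served straight from the
 * instance variable (vm_getivar), attr_writer through vm_setivar with the
 * shape id and ivar index cached in the call cache, falling back to the
 * slow path when the receiver's shape does not match the cache.
 */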
static VALUE
vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callcache *cc = calling->cc;
    RB_DEBUG_COUNTER_INC(ccf_ivar);
    cfp->sp -= 1;
    VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
    return ivar;
}

static VALUE
vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2;
    attr_index_t index = vm_cc_attr_index(cc);
    shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
    ID id = vm_cc_cme(cc)->def->body.attr.id;
    rb_check_frozen(obj);
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    if (UNDEF_P(res)) {
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            break;
          default:
            res = vm_setivar_default(obj, id, val, dest_shape_id, index);
            if (!UNDEF_P(res)) {
                return res;
            }
        }
        res = vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    return res;
}

static VALUE
vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
}
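/*
 * bmethods (methods defined with define_method) carry a Proc.  Calling one
 * either re-enters the interpreter directly on the Proc's iseq
 * (vm_call_iseq_bmethod) or goes through the generic block invocation path
 * (vm_call_noniseq_bmethod).  A bmethod whose Proc is not shareable may
 * only be called from the ractor it was defined in.
 */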
static VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}

static VALUE
vm_call_iseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    VM_ASSERT(vm_block_type(block) == block_type_iseq);

    const struct rb_captured_block *captured = &block->as.captured;
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    VALUE * const argv = cfp->sp - calling->argc;
    const int arg_size = ISEQ_BODY(iseq)->param.size;

    int opt_pc;
    if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
        opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
    }
    else {
        opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
    }

    cfp->sp = argv - 1; // -1 for the receiver

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
                  calling->recv,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)cme,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  argv + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size,
                  ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}

static VALUE
vm_call_noniseq_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);

    VALUE *argv;
    int argc;
    CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(calling->heap_argv)) {
        argv = RARRAY_PTR(calling->heap_argv);
        cfp->sp -= 2;
    }
    else {
        argc = calling->argc;
        argv = ALLOCA_N(VALUE, argc);
        MEMCPY(argv, cfp->sp - argc, VALUE, argc);
        cfp->sp += - argc - 1;
    }

    return vm_call_bmethod_body(ec, calling, argv);
}

static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;
    rb_proc_t *proc;
    GetProcPtr(procv, proc);
    const struct rb_block *block = &proc->block;

    while (vm_block_type(block) == block_type_proc) {
        block = vm_proc_block(block->as.proc);
    }
    if (vm_block_type(block) == block_type_iseq) {
        CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
        return vm_call_iseq_bmethod(ec, cfp, calling);
    }

    CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
    return vm_call_noniseq_bmethod(ec, cfp, calling);
}
VALUE
rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
{
    VALUE klass = current_class;

    /* for prepended Module, start from the origin class */
    if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
            RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
        klass = RBASIC_CLASS(klass);
    }

    while (RTEST(klass)) {
        VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
        if (owner == target_owner) {
            return klass;
        }
        klass = RCLASS_SUPER(klass);
    }

    return current_class; /* maybe module function */
}

static const rb_callable_method_entry_t *
aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
        VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
        cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);

        if (me->def->reference_count == 1) {
            RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
        }
        else {
            rb_method_definition_t *def =
                rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
            rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
        }
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));
    return cme;
}

const rb_callable_method_entry_t *
rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
{
    return aliased_callable_method_entry(me);
}

static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}

static enum method_missing_reason
ci_missing_reason(const struct rb_callinfo *ci)
{
    enum method_missing_reason stat = MISSING_NOENTRY;
    if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
    if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
    if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
    return stat;
}
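/*
 * vm_call_symbol implements dispatch through a Symbol (the send/__send__
 * and Symbol#to_proc paths): resolve the method id, fall back to
 * method_missing when it does not exist, then re-enter the generic call
 * machinery with a synthesized call info/call cache on the machine stack.
 */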
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    ASSUME(calling->argc >= 0);

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol);
    flags |= VM_CALL_OPT_SEND;

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        VALUE argv_ary;
        if (UNLIKELY(argv_ary = calling->heap_argv)) {
            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                rb_ary_unshift(argv_ary, symbol);

                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv,
                                                        RARRAY_LENINT(argv_ary), RARRAY_CONST_PTR(argv_ary),
                                                        priv);
                rb_exc_raise(exc);
            }
            rb_ary_unshift(argv_ary, rb_str_intern(symbol));
        }
        else {
            /* make room for the method-name argument in front of argv */
            int i = argc;
            CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
            INC_SP(1);
            MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
            argc = ++calling->argc;

            if (rb_method_basic_definition_p(klass, idMethodMissing)) {
                /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
                TOPN(i) = symbol;
                int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
                const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
                VALUE exc = rb_make_no_method_exception(rb_eNoMethodError, 0, recv, argc, argv, priv);
                rb_exc_raise(exc);
            }
            else {
                TOPN(i) = rb_str_intern(symbol);
            }
        }
    }

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
static VALUE
vm_call_opt_send0(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, int flags)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    int i;
    VALUE sym;

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }

    sym = TOPN(i);
    /* shift the remaining arguments down over the method-name symbol */
    if (i > 0) {
        MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
    }
    calling->argc -= 1;
    DEC_SP(1);

    return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
}

static VALUE
vm_call_opt_send_complex(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = VM_CALL_FCALL;
    VALUE sym;

    VALUE argv_ary;
    CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
    if (UNLIKELY(argv_ary = calling->heap_argv)) {
        sym = rb_ary_shift(argv_ary);
        flags |= VM_CALL_ARGS_SPLAT;
        if (calling->kw_splat) {
            VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
            ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
            calling->kw_splat = 0;
        }
        return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
    }

    if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
    return vm_call_opt_send0(ec, reg_cfp, calling, flags);
}

static VALUE
vm_call_opt_send_simple(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
    return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
}

static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    const struct rb_callinfo *ci = calling->cd->ci;
    int flags = vm_ci_flag(ci);

    if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
        CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
        return vm_call_opt_send_complex(ec, reg_cfp, calling);
    }

    CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
    return vm_call_opt_send_simple(ec, reg_cfp, calling);
}
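/*
 * method_missing dispatch: make room for the method-name symbol as the
 * first argument, swap in a call info/call cache for idMethodMissing on
 * the machine stack, and re-enter vm_call_method.
 */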
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc, flag;

    flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
    argc = ++calling->argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;

    struct rb_forwarding_call_data new_fcd = {
        .cd = {
            .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
            .cc = NULL,
        },
        .caller_ci = NULL,
    };

    if (!(flag & VM_CALL_FORWARDING)) {
        calling->cd = &new_fcd.cd;
    }
    else {
        const struct rb_callinfo *caller_ci = ((struct rb_forwarding_call_data *)calling->cd)->caller_ci;
        VM_ASSERT((vm_ci_argc(caller_ci), 1));
        new_fcd.caller_ci = caller_ci;
        calling->cd = (struct rb_call_data *)&new_fcd;
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}

static VALUE
vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
}

static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);

static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->cd->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
static inline VALUE
find_refinement(VALUE refinements, VALUE klass)
{
    if (NIL_P(refinements)) {
        return Qnil;
    }
    return rb_hash_lookup(refinements, klass);
}
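/*
 * Refinement dispatch: walk the cref chain of the caller, look up the
 * refinement module that is active for the method owner, and redirect the
 * call to the refined implementation when one is found.
 */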
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}

static const rb_callable_method_entry_t *
refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
{
    const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
    const rb_callable_method_entry_t *cme;

    if (orig_me->defined_class == 0) {
        cme = NULL;
        rb_notimplement();
    }
    else {
        cme = (const rb_callable_method_entry_t *)orig_me;
    }

    VM_ASSERT(callable_method_entry_p(cme));

    if (UNDEFINED_METHOD_ENTRY_P(cme)) {
        cme = NULL;
    }

    return cme;
}

static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    ID mid = vm_ci_mid(calling->cd->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        return klass ? rb_callable_method_entry(klass, mid) : NULL;
    }
}

static VALUE
vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const rb_callable_method_entry_t *ref_cme = search_refined_method(ec, cfp, calling);

    if (ref_cme) {
        if (calling->cd->cc) {
            const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
            RB_OBJ_WRITE(cfp->iseq, &calling->cd->cc, cc);
            return vm_call_method(ec, cfp, calling);
        }
        else {
            struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
            calling->cc = ref_cc;
            return vm_call_method(ec, cfp, calling);
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
NOINLINE(static VALUE
         vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                                  struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));

static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}

static VALUE
vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_call);

    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE procval = calling->recv;
    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
}

static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->cd->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}

static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}

static VALUE
vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);

    VALUE ret = vm_call_opt_struct_aref0(ec, calling);
    reg_cfp->sp -= 1;
    return ret;
}

static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}

static VALUE
vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);

    VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
    reg_cfp->sp -= 2;
    return ret;
}
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }

static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aref(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
        CALLER_SETUP_ARG(cfp, calling, ci, 1);
        rb_check_arity(calling->argc, 1, 1);

        VALUE v;
        VM_CALL_METHOD_ATTR(v,
                            vm_call_opt_struct_aset(ec, cfp, calling),
                            set_vm_cc_ivar(cc); \
                            CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
        return v;
      }
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
            CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
            return vm_call_iseq_fwd_setup(ec, cfp, calling);
        }
        else {
            CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
            return vm_call_iseq_setup(ec, cfp, calling);
        }

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci, 1);

        rb_check_arity(calling->argc, 1, 1);

        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_  = cc->cme_,
                .call_ = cc->call_,
                .aux_  = {
                    .attr = {
                        .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci, 0);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        // do not set FASTPATH since vm_call_refined assumes cc->call is
        // vm_call_super_method on invokesuper
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->cd->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        if (UNLIKELY(calling->heap_argv)) {
            vm_raise_method_missing(ec, RARRAY_LENINT(calling->heap_argv), RARRAY_CONST_PTR(calling->heap_argv), calling->recv, stat);
        }
        else {
            rb_control_frame_t *reg_cfp = cfp;
            VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
            vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
        }
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}

static VALUE
vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
{
    VALUE defined_class = me->defined_class;
    VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
    return NIL_P(refined_class) ? defined_class : refined_class;
}

static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info as protected */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}

void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}

static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    // This check makes the function different from vm_call_general, which
    // some compilers would otherwise merge with it.
    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish with other functions. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
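/*
 * super() helpers: find the class to continue the method lookup from,
 * taking refinements, prepended/included modules, and the special origin
 * iclass into account, and build (or reuse) a call cache for the super
 * call site.
 */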
static inline VALUE
vm_search_normal_superclass(VALUE klass)
{
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
            RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
            FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}

NORETURN(static void vm_super_outside(void));

static void
vm_super_outside(void)
{
    rb_raise(rb_eRuntimeError, "super called outside of method");
}

static const struct rb_callcache *
empty_cc_for_super(void)
{
    return &vm_empty_cc_for_super;
}
static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = vm_defined_class_for_protected_call(me);

    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
            rb_raise(rb_eTypeError,
                     "self has wrong type to call super in this context: "
                     "%"PRIsVALUE" (expected %"PRIsVALUE")",
                     rb_obj_class(recv), m);
        }
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
        rb_raise(rb_eRuntimeError,
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    ID mid = me->def->original_id;

    if (!vm_ci_markable(cd->ci)) {
        VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
    }
    else {
        /* update the call info with the currently-running method id */
        cd->ci = vm_ci_new_runtime(mid,
                                   vm_ci_flag(cd->ci),
                                   vm_ci_argc(cd->ci),
                                   vm_ci_kwarg(cd->ci));
    }

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of module */
        const struct rb_callcache *cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
        return cc;
    }

    const struct rb_callcache *cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
    const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

    // define_method can cache for a different method id
    if (cached_cme == NULL) {
        // empty_cc_for_super is not a markable object
        cd->cc = empty_cc_for_super();
    }
    else if (cached_cme->called_id != mid) {
        const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
        if (cme) {
            cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
            RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
        }
        else {
            cd->cc = cc = empty_cc_for_super();
        }
    }
    else {
        switch (cached_cme->def->type) {
          // vm_call_refined (search_refined_method) assumes cc->call is
          // vm_call_super_method on invokesuper.
          case VM_METHOD_TYPE_REFINED:
          // cc->klass is the superclass of the receiver class, so checking
          // cc->klass alone cannot invalidate the IVC for the receiver class.
          case VM_METHOD_TYPE_ATTRSET:
          case VM_METHOD_TYPE_IVAR:
            vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
            break;
          default:
            break; // use fastpath
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));

    return cc;
}
/* yield */

static inline int
block_proc_is_lambda(const VALUE procval)
{
    rb_proc_t *proc;

    if (procval) {
        GetProcPtr(procval, proc);
        return proc->is_lambda;
    }
    else {
        return 0;
    }
}

static VALUE
vm_yield_with_cfunc(rb_execution_context_t *ec,
                    const struct rb_captured_block *captured,
                    VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
                    const rb_callable_method_entry_t *me)
{
    int is_lambda = FALSE; /* TODO */
    VALUE val, arg, blockarg;
    int frame_flag;
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    if (is_lambda) {
        arg = rb_ary_new4(argc, argv);
    }
    else if (argc == 0) {
        arg = Qnil;
    }
    else {
        arg = argv[0];
    }

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
    if (kw_splat) {
        frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    }

    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  frame_flag,
                  self,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;
}
VALUE
rb_vm_yield_with_cfunc(rb_execution_context_t *ec, const struct rb_captured_block *captured, int argc, const VALUE *argv)
{
    return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
}

static VALUE
vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
{
    return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
}

static inline int
vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
{
    int i;
    long len = RARRAY_LEN(ary);

    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);

    for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
        argv[i] = RARRAY_AREF(ary, i);
    }

    return i;
}

static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
    VM_ASSERT(argv[0] == arg0);
    return ary;
}
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci,
                          const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);

        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}

static int
vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int flags,
                    VALUE block_handler, enum arg_setup_type arg_setup_type)
{
    struct rb_calling_info calling_entry, *calling;

    calling = &calling_entry;
    calling->argc = argc;
    calling->block_handler = block_handler;
    calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
    calling->recv = Qundef;
    calling->heap_argv = 0;
    struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
}
/* ruby iseq -> ruby block */

static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;
    VALUE * const argv = rsp;
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
    int flags = vm_ci_flag(ci);

    if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
                 ((calling->argc == 0) ||
                  (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
                  (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
                  ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
        CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
        flags = 0;
        if (UNLIKELY(calling->heap_argv)) {
#if VM_ARGC_STACK_MAX < 0
            if (RARRAY_LEN(calling->heap_argv) < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
#endif
            calling->recv = rb_ary_shift(calling->heap_argv);
            // Modify the stack to avoid a cfp consistency error
            reg_cfp->sp++;
            reg_cfp->sp[-1] = reg_cfp->sp[-2];
            reg_cfp->sp[-2] = calling->recv;
            flags |= VM_CALL_ARGS_SPLAT;
        }
        else {
            if (calling->argc < 1) {
                rb_raise(rb_eArgError, "no receiver given");
            }
            calling->recv = TOPN(--calling->argc);
        }
        if (calling->kw_splat) {
            flags |= VM_CALL_KW_SPLAT;
        }
    }
    else {
        if (calling->argc < 1) {
            rb_raise(rb_eArgError, "no receiver given");
        }
        calling->recv = TOPN(--calling->argc);
    }

    return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
}

static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should pop argv before calling the C function? */
    return val;
}
static VALUE
vm_proc_to_block_handler(VALUE procval)
{
    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
      case block_type_iseq:
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
      case block_type_ifunc:
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
      case block_type_symbol:
        return VM_BH_FROM_SYMBOL(block->as.symbol);
      case block_type_proc:
        return VM_BH_FROM_PROC(block->as.proc);
    }
    VM_UNREACHABLE(vm_yield_with_proc);
    return Qundef;
}

static VALUE
vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
        VALUE proc = VM_BH_TO_PROC(block_handler);
        is_lambda = block_proc_is_lambda(proc);
        block_handler = vm_proc_to_block_handler(proc);
    }

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
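/*
 * vm_invoke_block dispatches on the block handler's type: an iseq block
 * re-enters the interpreter, an ifunc block calls back into C, a Symbol
 * behaves like Symbol#to_proc, and a Proc handler is unwrapped first by
 * vm_invoke_proc_block above.
 */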
static inline VALUE
vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                struct rb_calling_info *calling, const struct rb_callinfo *ci,
                bool is_lambda, VALUE block_handler)
{
    VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                  struct rb_calling_info *calling, const struct rb_callinfo *ci,
                  bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:   func = vm_invoke_iseq_block;   break;
      case block_handler_type_ifunc:  func = vm_invoke_ifunc_block;  break;
      case block_handler_type_proc:   func = vm_invoke_proc_block;   break;
      case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
      default: rb_bug("vm_invoke_block: unreachable");
    }

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
}
static VALUE
vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
{
    rb_execution_context_t *ec = GET_EC();
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
    struct rb_captured_block *captured;

    if (cfp == 0) {
        rb_bug("vm_make_proc_with_iseq: unreachable");
    }

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
    captured->code.iseq = blockiseq;

    return rb_vm_make_proc(ec, captured, rb_cProc);
}

static VALUE
vm_once_exec(VALUE iseq)
{
    VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
    return rb_proc_call_with_block(proc, 0, 0, Qnil);
}

static VALUE
vm_once_clear(VALUE data)
{
    union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
    is->once.running_thread = NULL;
    return Qnil;
}

/* defined insn */

static VALUE
check_respond_to_missing(VALUE obj, VALUE v)
{
    VALUE args[2];
    VALUE r;

    args[0] = obj; args[1] = Qfalse;
    r = rb_check_funcall(v, idRespond_to_missing, 2, args);
    if (!UNDEF_P(r) && RTEST(r)) {
        return Qtrue;
    }
    else {
        return Qfalse;
    }
}
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
      case DEFINED_METHOD: {
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
              case METHOD_VISI_PUBLIC:
                return true;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER:
        {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                if (!klass) return false;

                ID id = me->def->original_id;

                return rb_method_boundp(klass, id, 0);
            }
        }
        break;
      case DEFINED_REF:
        return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}

bool
rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    return vm_defined(ec, reg_cfp, op_type, obj, v);
}
static const VALUE *
vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
{
    rb_num_t i;
    const VALUE *ep = reg_ep;
    for (i = 0; i < lv; i++) {
        ep = GET_PREV_EP(ep);
    }
    return ep;
}

static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
static VALUE
vm_concat_array(VALUE ary1, VALUE ary2st)
{
    const VALUE ary2 = ary2st;
    VALUE tmp1 = rb_check_to_array(ary1);
    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp1)) {
        tmp1 = rb_ary_new3(1, ary1);
    }
    if (tmp1 == ary1) {
        tmp1 = rb_ary_dup(ary1);
    }

    if (NIL_P(tmp2)) {
        return rb_ary_push(tmp1, ary2);
    }
    else {
        return rb_ary_concat(tmp1, tmp2);
    }
}

static VALUE
vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    /* ary1 must be a newly created array */
    const VALUE ary2 = ary2st;

    if (NIL_P(ary2)) return ary1;

    VALUE tmp2 = rb_check_to_array(ary2);

    if (NIL_P(tmp2)) {
        return rb_ary_push(ary1, ary2);
    }
    else {
        return rb_ary_concat(ary1, tmp2);
    }
}

// YJIT implementation calls this non-static function
VALUE
rb_vm_concat_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_array(ary1, ary2st);
}

VALUE
rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
{
    return vm_concat_to_array(ary1, ary2st);
}

static VALUE
vm_splat_array(VALUE flag, VALUE ary)
{
    if (NIL_P(ary)) {
        return RTEST(flag) ? rb_ary_new() : rb_cArray_empty_frozen;
    }
    VALUE tmp = rb_check_to_array(ary);
    if (NIL_P(tmp)) {
        return rb_ary_new3(1, ary);
    }
    else if (RTEST(flag)) {
        return rb_ary_dup(tmp);
    }
    else {
        return tmp;
    }
}

VALUE
rb_vm_splat_array(VALUE flag, VALUE ary)
{
    return vm_splat_array(flag, ary);
}
static VALUE
vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;

    if (flag & VM_CHECKMATCH_ARRAY) {
        long i;
        const long n = RARRAY_LEN(pattern);

        for (i = 0; i < n; i++) {
            VALUE v = RARRAY_AREF(pattern, i);
            VALUE c = check_match(ec, v, target, type);

            if (RTEST(c)) {
                return c;
            }
        }
        return Qfalse;
    }
    else {
        return check_match(ec, pattern, target, type);
    }
}

VALUE
rb_vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
{
    return vm_check_match(ec, target, pattern, flag);
}

static VALUE
vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
{
    const VALUE kw_bits = *(ep - bits);

    if (FIXNUM_P(kw_bits)) {
        unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
        if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
            return Qfalse;
    }
    else {
        VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
        if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
    }
    return Qtrue;
}
static void
vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
{
    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

        switch (flag) {
          case RUBY_EVENT_CALL:
            RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_CALL:
            RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_RETURN:
            RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_RETURN:
            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
            return;
        }
    }
}
static VALUE
vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
{
    if (!rb_const_defined_at(cbase, id)) {
        return 0;
    }
    else if (VM_DEFINECLASS_SCOPED_P(flags)) {
        return rb_public_const_get_at(cbase, id);
    }
    else {
        return rb_const_get_at(cbase, id);
    }
}

static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
            rb_raise(rb_eTypeError,
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}

static VALUE
vm_check_if_module(ID id, VALUE mod)
{
    if (!RB_TYPE_P(mod, T_MODULE)) {
        return 0;
    }
    else {
        return mod;
    }
}

static VALUE
declare_under(ID id, VALUE cbase, VALUE c)
{
    rb_set_class_path_string(c, cbase, rb_id2str(id));
    rb_const_set(cbase, id, c);
    return c;
}

static VALUE
vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    /* new class declaration */
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
    VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
    rb_define_alloc_func(c, rb_get_alloc_func(c));
    rb_class_inherited(s, c);
    return c;
}

static VALUE
vm_declare_module(ID id, VALUE cbase)
{
    /* new module declaration */
    return declare_under(id, cbase, rb_module_new());
}
NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
    rb_exc_raise(rb_exc_new_str(rb_eTypeError, message));
}
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
        rb_raise(rb_eTypeError,
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    rb_autoload_load(cbase, id);
    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}

static VALUE
vm_define_module(ID id, rb_num_t flags, VALUE cbase)
{
    VALUE mod;

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);
        return mod;
    }
    else {
        return vm_declare_module(id, cbase);
    }
}

static VALUE
vm_find_or_create_class_by_id(ID id,
                              rb_num_t flags,
                              VALUE cbase,
                              VALUE super)
{
    rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);

    switch (type) {
      case VM_DEFINECLASS_TYPE_CLASS:
        /* classdef returns class scope value */
        return vm_define_class(id, flags, cbase, super);

      case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
        /* classdef returns class scope value */
        return rb_singleton_class(cbase);

      case VM_DEFINECLASS_TYPE_MODULE:
        /* classdef returns class scope value */
        return vm_define_module(id, flags, cbase);

      default:
        rb_bug("unknown defineclass type: %d", (int)type);
    }
}
static rb_method_visibility_t
vm_scope_visibility_get(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return METHOD_VISI_PUBLIC;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
    }
}

static int
vm_scope_module_func_check(const rb_execution_context_t *ec)
{
    const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);

    if (!vm_env_cref_by_cref(cfp->ep)) {
        return FALSE;
    }
    else {
        return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
    }
}

static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    // Set max_iv_count on klasses based on the number of ivar sets in the initialize method
    if (id == idInitialize && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS)) {
        RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->cd->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}

enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};

static inline VALUE
vm_sendish(struct rb_execution_context_struct *ec,
           struct rb_control_frame_struct *reg_cfp,
           struct rb_call_data *cd,
           VALUE block_handler,
           enum method_explorer_type method_explorer)
{
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .cd = cd,
        .heap_argv = 0,
    };

    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
    return val;
}
VALUE
rb_vm_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh;
    VALUE val;

    if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
        struct rb_forwarding_call_data adjusted_cd;
        struct rb_callinfo adjusted_ci;

        bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);

        val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);

        if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
            RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
        }
    }
    else {
        bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
        val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    }

    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_opt_send_without_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_invokesuper(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd, ISEQ blockiseq)
{
    VALUE bh;
    VALUE val;

    if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
        struct rb_forwarding_call_data adjusted_cd;
        struct rb_callinfo adjusted_ci;

        bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);

        val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);

        if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
            RB_OBJ_WRITE(GET_ISEQ(), &cd->cc, adjusted_cd.cd.cc);
        }
    }
    else {
        bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
        val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
    }

    VM_EXEC(ec, val);
    return val;
}

VALUE
rb_vm_invokeblock(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, CALL_DATA cd)
{
    VALUE bh = VM_BLOCK_HANDLER_NONE;
    VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
    VM_EXEC(ec, val);
    return val;
}
      case T_SYMBOL:
        if (check_method_basic_definition(vm_cc_cme(cc))) {
            // rb_sym_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            return rb_sym2str(recv);
        }
        break;
      case T_MODULE:
      case T_CLASS:
        if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
            // rb_mod_to_s() allocates a mutable string, but since we are only
            // going to use this string for interpolation, it's fine to use the
            // frozen string.
            VALUE val = rb_mod_to_s(recv);
            return val;
        }
        break;
      case T_NIL:
        if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
            return rb_nil_to_s(recv);
        }
        break;
      case T_TRUE:
        if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
            return rb_true_to_s(recv);
        }
        break;
      case T_FALSE:
        if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
            return rb_false_to_s(recv);
        }
        break;
      case T_FIXNUM:
        if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
            return rb_fix_to_s(recv);
        }
        break;
    }
    return Qundef;
}
static VALUE
vm_opt_ary_freeze(VALUE ary, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return ary;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_hash_freeze(VALUE hash, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return hash;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_str_freeze(VALUE str, int bop, ID id)
{
    if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        return str;
    }
    else {
        return Qundef;
    }
}
static VALUE
vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};

        // duparray
        RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
        VALUE dupary = rb_ary_resurrect(ary);

        return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_duparray_include_p(rb_execution_context_t *ec, const VALUE ary, VALUE target)
{
    return vm_opt_duparray_include_p(ec, ary, target);
}
static VALUE
vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) > 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_max(ec, num, ptr);
}

static VALUE
vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
        if (num == 0) {
            return Qnil;
        }
        else {
            VALUE result = *ptr;
            rb_snum_t i = num - 1;
            while (i-- > 0) {
                const VALUE v = *++ptr;
                if (OPTIMIZED_CMP(v, result) < 0) {
                    result = v;
                }
            }
            return result;
        }
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}

static VALUE
vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_hash_values(num, ptr);
    }
    else {
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idHash, 0, NULL, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_hash(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_hash(ec, num, ptr);
}
VALUE rb_setup_fake_ary(struct RArray *fake_ary, const VALUE *list, long len);
VALUE rb_ec_pack_ary(rb_execution_context_t *ec, VALUE ary, VALUE fmt, VALUE buffer);

static VALUE
vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary;
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ary_includes(ary, target);
    }
    else {
        VALUE args[1] = {target};
        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idIncludeP, 1, args, RB_NO_KEYWORDS);
    }
}

VALUE
rb_vm_opt_newarray_include_p(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE target)
{
    return vm_opt_newarray_include_p(ec, num, ptr, target);
}

static VALUE
vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
        struct RArray fake_ary;
        VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
        return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
    }
    else {
        // The opt_newarray_send insn drops the keyword args, so rebuild the
        // argument list with room for the keyword hash.
        VALUE args[2];
        args[0] = fmt;
        int kw_splat = RB_NO_KEYWORDS;
        int argc = 1;

        if (!UNDEF_P(buffer)) {
            args[1] = rb_hash_new_with_size(1);
            rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
            kw_splat = RB_PASS_KEYWORDS;
            argc++;
        }

        return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
    }
}

VALUE
rb_vm_opt_newarray_pack_buffer(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt, VALUE buffer)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
}

VALUE
rb_vm_opt_newarray_pack(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr, VALUE fmt)
{
    return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
}
static void
vm_track_constant_cache(ID id, void *ic)
{
    rb_vm_t *vm = GET_VM();
    struct rb_id_table *const_cache = vm->constant_cache;
    VALUE lookup_result;
    st_table *ics;

    if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
        ics = (st_table *)lookup_result;
    }
    else {
        ics = st_init_numtable();
        rb_id_table_insert(const_cache, id, (VALUE)ics);
    }

    /* st_insert() below can trigger a GC, which may free an iseq that also
     * caches this constant and, with it, this very ST table.  Remember the
     * ID being inserted so remove_from_constant_cache does not free the
     * table underneath us.  See [Bug #20921]. */
    vm->inserting_constant_cache_id = id;

    st_insert(ics, (st_data_t)ic, (st_data_t)Qtrue);

    vm->inserting_constant_cache_id = (ID)0;
}

static void
vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
{
    RB_VM_LOCK_ENTER();

    for (int i = 0; segments[i]; i++) {
        ID id = segments[i];
        if (id == idNULL) continue;
        vm_track_constant_cache(id, ic);
    }

    RB_VM_LOCK_LEAVE();
}
// For JIT inlining
static inline bool
vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
{
    if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
        VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));

        return (ic_cref == NULL || // no need to check CREF
                ic_cref == vm_get_cref(reg_ep));
    }
    return false;
}

static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}

// YJIT needs this function to never allocate and never raise
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}

static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice = IMEMO_NEW(struct iseq_inline_constant_cache_entry, imemo_constcache, 0);
    RB_OBJ_WRITE(ice, &ice->value, val);
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice);

    RUBY_ASSERT(pc >= ISEQ_BODY(iseq)->iseq_encoded);
    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
}
VALUE
rb_vm_opt_getconstant_path(rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, IC ic)
{
    VALUE val;
    const ID *segments = ic->segments;
    struct iseq_inline_constant_cache_entry *ice = ic->entry;
    if (ice && vm_ic_hit_p(ice, GET_EP())) {
        val = ice->value;

        VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
    }
    else {
        ruby_vm_constant_cache_misses++;
        val = vm_get_ev_const_chain(ec, segments);
        vm_ic_track_const_chain(GET_CFP(), ic, segments);
        // Undo the PC increment to get the address of this instruction
        // (INSN_ATTR(width) == 2)
        vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
    }
    return val;
}

static VALUE
vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
{
    rb_thread_t *th = rb_ec_thread_ptr(ec);
    rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);

  again:
    if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
        return is->once.value;
    }
    else if (is->once.running_thread == NULL) {
        VALUE val;
        is->once.running_thread = th;
        val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
        RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
        /* is->once.running_thread is cleared by vm_once_clear() */
        is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
        return val;
    }
    else if (is->once.running_thread == th) {
        /* recursive once */
        return vm_once_exec((VALUE)iseq);
    }
    else {
        /* waiting for finish */
        RUBY_VM_CHECK_INTS(ec);
        rb_thread_schedule();
        goto again;
    }
}
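/*
 * vm_case_dispatch backs the opt_case_dispatch instruction: when none of
 * the relevant === methods have been redefined, the case key is looked up
 * in a precomputed CDHASH so execution can jump straight to the matching
 * when-branch; otherwise it returns 0 to fall back to sequential checks.
 */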
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1:
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
NORETURN(static void
         vm_stack_consistency_error(const rb_execution_context_t *ec,
                                    const rb_control_frame_t *,
                                    const VALUE *));
static void
vm_stack_consistency_error(const rb_execution_context_t *ec,
                           const rb_control_frame_t *cfp,
                           const VALUE *bp)
{
    const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
    const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
    static const char stack_consistency_error[] =
        "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
#if defined RUBY_DEVEL
    VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
    rb_str_cat_cstr(mesg, "\n");
    rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
    rb_exc_fatal(rb_exc_new3(rb_eFatal, mesg));
#else
    rb_bug(stack_consistency_error, nsp, nbp);
#endif
}
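/*
 * The vm_opt_* helpers below implement the specialized arithmetic and
 * comparison instructions: each one handles the common Fixnum/Float (and,
 * where applicable, String/Array/Hash) cases inline, guarded by
 * BASIC_OP_UNREDEFINED_P, and returns Qundef so the caller falls back to a
 * regular method call when the receiver type or a redefinition rules the
 * fast path out.
 */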
static VALUE
vm_opt_plus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_plus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             RBASIC_CLASS(obj) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_opt_plus(recv, obj);
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             RBASIC_CLASS(obj) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_plus(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_minus(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_minus_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mult(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
        return rb_fix_mul_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_div(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
        return rb_flo_div_flo(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_mod(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
        return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
    }
    else {
        return Qundef;
    }
}
static VALUE
vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
        VALUE val = opt_equality(iseq, recv, obj, cd_eq);

        if (!UNDEF_P(val)) {
            return RBOOL(!RTEST(val));
        }
    }

    return Qundef;
}

static VALUE
vm_opt_lt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_le(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_gt(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_ge(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
        return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
    }
    else if (FLONUM_2_P(recv, obj) &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat &&
             RBASIC_CLASS(obj)  == rb_cFloat &&
             BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
        return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
    }
    else {
        return Qundef;
    }
}
static VALUE
vm_opt_ltlt(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
        if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
            return rb_str_buf_append(recv, obj);
        }
        else {
            return rb_str_concat(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
        return rb_ary_push(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_and(VALUE recv, VALUE obj)
{
    // Both operands being fixnums means the tag bit is 1 on both, so the
    // bitwise AND of the raw VALUEs is itself a valid fixnum.
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
        return recv & obj;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_or(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) &&
        BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
        return recv | obj;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aref(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        if (FIXNUM_2_P(recv, obj) &&
            BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
            return rb_fix_aref(recv, obj);
        }
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
        if (FIXNUM_P(obj)) {
            return rb_ary_entry_internal(recv, FIX2LONG(obj));
        }
        else {
            return rb_ary_aref1(recv, obj);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
        return rb_hash_aref(recv, obj);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
             FIXNUM_P(obj)) {
        rb_ary_store(recv, FIX2LONG(obj), set);
        return set;
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
        rb_hash_aset(recv, obj, set);
        return set;
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_aref_with(VALUE recv, VALUE key)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse &&
        !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
        return rb_hash_aref(recv, key);
    }
    else {
        return Qundef;
    }
}

VALUE
rb_vm_opt_aref_with(VALUE recv, VALUE key)
{
    return vm_opt_aref_with(recv, key);
}

static VALUE
vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
{
    if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
        BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
        rb_hash_compare_by_id_p(recv) == Qfalse) {
        return rb_hash_aset(recv, key, val);
    }
    else {
        return Qundef;
    }
}
static VALUE
vm_opt_length(VALUE recv, int bop)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
        if (bop == BOP_EMPTY_P) {
            return LONG2NUM(RSTRING_LEN(recv));
        }
        else {
            return rb_str_length(recv);
        }
    }
    else if (RBASIC_CLASS(recv) == rb_cArray &&
             BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
        return LONG2NUM(RARRAY_LEN(recv));
    }
    else if (RBASIC_CLASS(recv) == rb_cHash &&
             BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
        return INT2FIX(RHASH_SIZE(recv));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_empty_p(VALUE recv)
{
    switch (vm_opt_length(recv, BOP_EMPTY_P)) {
      case Qundef: return Qundef;
      case INT2FIX(0): return Qtrue;
      default: return Qfalse;
    }
}

VALUE rb_false(VALUE obj);

static VALUE
vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (NIL_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
        return Qtrue;
    }
    else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
        return Qfalse;
    }
    else {
        return Qundef;
    }
}

static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* INT2FIX(-1).succ is 0 */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* LONG2FIX(FIXNUM_MAX) + 1 overflows into a Bignum */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* with the fixnum tag bit, x.succ is simply x + 2 */
        return x + 2;
    }
}

static VALUE
vm_opt_succ(VALUE recv)
{
    if (FIXNUM_P(recv) &&
        BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
        return fix_succ(recv);
    }
    else if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
        return rb_str_succ(recv);
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
{
    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
        return RBOOL(!RTEST(recv));
    }
    else {
        return Qundef;
    }
}

static VALUE
vm_opt_regexpmatch2(VALUE recv, VALUE obj)
{
    if (SPECIAL_CONST_P(recv)) {
        return Qundef;
    }
    else if (RBASIC_CLASS(recv) == rb_cString &&
             CLASS_OF(obj) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
        return rb_reg_match(obj, recv);
    }
    else if (RBASIC_CLASS(recv) == rb_cRegexp &&
             BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
        return rb_reg_match(recv, obj);
    }
    else {
        return Qundef;
    }
}
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    // Load here since the global hook above can add and free local hooks
    rb_hook_list_t *local_hooks = *local_hooks_ptr;
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}

#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
    } \
} while (0)

static VALUE
rescue_errinfo(rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
    VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
    return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
}
static void
vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
{
    const VALUE *pc = reg_cfp->pc;
    rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
    rb_event_flag_t global_events = enabled_flags;

    if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
        return;
    }
    else {
        const rb_iseq_t *iseq = reg_cfp->iseq;
        size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
        rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
        rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
        rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
        rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
        rb_hook_list_t *bmethod_local_hooks = NULL;
        rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
        rb_event_flag_t bmethod_local_events = 0;
        const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
        enabled_flags |= iseq_local_events;

        VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);

        if (bmethod_frame) {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
            VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
            bmethod_local_hooks = me->def->body.bmethod.hooks;
            bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
            if (bmethod_local_hooks) {
                bmethod_local_events = bmethod_local_hooks->events;
            }
        }

        if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
            /* disable trace */
            /* TODO: scan all optimized instructions and set/unset for them.
             * rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
             */
            return;
        }
        else if (ec->trace_arg != NULL) {
            /* already tracing */
            return;
        }
        else {
            rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
            /* Note: iseq local events are not considered here because the
             * same iseq can be shared by multiple bmethods. */
            rb_event_flag_t bmethod_events = global_events | bmethod_local_events;

            if (0) {
                ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
                                  (int)pos,
                                  (int)pc_events,
                                  RSTRING_PTR(rb_iseq_path(iseq)),
                                  (int)rb_iseq_line_no(iseq, pos),
                                  RSTRING_PTR(rb_iseq_label(iseq)));
            }
            VM_ASSERT(reg_cfp->pc == pc);
            VM_ASSERT(pc_events != 0);

            /* check traces */
            if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
                /* b_call instruction running as a method. Fire call event. */
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks_ptr, Qundef);
            }
            VM_TRACE_HOOK(RUBY_EVENT_CLASS | RUBY_EVENT_CALL | RUBY_EVENT_B_CALL, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
            VM_TRACE_HOOK(RUBY_EVENT_RESCUE, rescue_errinfo(ec, reg_cfp));
            VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
            if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
                /* b_return instruction running as a method. Fire return event. */
                vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks_ptr, TOPN(0));
            }
        }
    }
}
#if VM_CHECK_MODE > 0
NORETURN(NOINLINE(COLDFUNC void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

Init_vm_stack_canary(void)
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01;
    vm_stack_canary_was_born = true;
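/* Why the |= 0x01 above (hedged): forcing the low bit on makes the
 * canary an odd VALUE, i.e. a tagged Fixnum.  A Fixnum can never alias
 * a heap object pointer, so a canary accidentally left on the VM stack
 * is harmless to the GC and trivially recognizable when checked. */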
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);
    rb_bug("dead canary found at %s: %s", insn, str);

void Init_vm_stack_canary(void) { }
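/* A minimal sketch of the trampoline pattern used by the invoker
 * battery below; the example_* names are hypothetical.  Builtin
 * functions are stored type-erased as rb_insn_func_t together with
 * their arity, and each arity gets one trampoline that casts the
 * pointer back to its true shape and spreads argv positionally. */
typedef VALUE (*example_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);

static VALUE
example_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    /* recover the real 2-argument signature, then forward argv[0..1] */
    return (*(example_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
}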
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
static builtin_invoker
lookup_builtin_invoker(int argc)
    static const builtin_invoker invokers[] = {
    return invokers[argc];

    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));

    return invoke_bf(ec, cfp, bf, argv);
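/* Hedged note on the canary bracket above: SETUP_CANARY/CHECK_CANARY
 * are only armed for builtins flagged BUILTIN_ATTR_LEAF.  A leaf
 * builtin promises not to grow or otherwise disturb the VM stack, so a
 * clobbered canary after the call (reported through
 * rb_vm_canary_is_found_dead) pinpoints a builtin that broke that
 * promise. */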
    fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
    for (int i = 0; i < bf->argc; i++) {
        ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i + start_index]));
    ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
                      (void *)(uintptr_t)bf->func_ptr);
    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
    return invoke_bf(ec, cfp, bf, argv);
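/* Hedged sketch of the pointer arithmetic above: in a standard frame
 * the locals sit immediately below the VM_ENV_DATA_SIZE control slots
 * that ep points into, so local 0 lives at
 *
 *     cfp->ep - local_table_size - VM_ENV_DATA_SIZE + 1
 *
 * and adding start_index skips any leading locals that are not
 * forwarded to the builtin. */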
    return cfp->ep[index];