#include "ruby/internal/config.h"

#ifdef HAVE_STDATOMIC_H
  #include <stdatomic.h>
#endif

#include "debug_counter.h"
#include "internal/class.h"
#include "internal/compar.h"
#include "internal/hash.h"
#include "internal/numeric.h"
#include "internal/proc.h"
#include "internal/random.h"
#include "internal/variable.h"
#include "internal/set_table.h"
#include "internal/struct.h"

#include "insns_info.inc"
                            int argc, const VALUE *argv, int priv);
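/* Copy a pre-allocated special exception (e.g. the sysstack error) so the
 * shared instance itself is never mutated when it is raised. */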
ruby_vm_special_exception_copy(VALUE exc)
{
    VALUE e = rb_obj_alloc(rb_class_real(RBASIC_CLASS(exc)));
    rb_obj_copy_ivar(e, exc);
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;

    VALUE at = rb_ec_backtrace_object(ec);
    mesg = ruby_vm_special_exception_copy(mesg);

    EC_JUMP_TAG(ec, TAG_RAISE);
NORETURN(static void vm_stackoverflow(void));

static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}
        rb_bug("system stack overflow during GC. Faulty native extension?");

    if (crit >= rb_stack_overflow_fatal) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
    ec_stack_overflow(ec, crit < rb_stack_overflow_signal);
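/* The helpers below are sanity checks compiled in only under VM_CHECK_MODE;
 * they validate the invariants of each frame before it is pushed. */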
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment),
              "imemo_type:%s", rb_imemo_name(imemo_type((VALUE)cme)));

    if (callable_class_p(cme->defined_class)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env;

        cref_or_me_type = imemo_type(cref_or_me);

    if (type & VM_FRAME_FLAG_BMETHOD) {
    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }

        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }

        if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
            if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC || magic == VM_FRAME_MAGIC_DUMMY) && (cref_or_me_type == imemo_ment)) {
            }
            else {
                rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
            }
        }

    if (cref_or_me_type == imemo_ment) {
        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }
    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break

    switch (given_magic) {
      CHECK(VM_FRAME_MAGIC_METHOD, TRUE,  TRUE,  FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_CLASS,  TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_TOP,    TRUE,  FALSE, TRUE,  FALSE);
      CHECK(VM_FRAME_MAGIC_CFUNC,  TRUE,  TRUE,  FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_BLOCK,  FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_IFUNC,  FALSE, FALSE, FALSE, TRUE);
      CHECK(VM_FRAME_MAGIC_EVAL,   FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
      CHECK(VM_FRAME_MAGIC_DUMMY,  TRUE,  FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
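/* Stack canary: a random VALUE is planted just above the SP after each frame
 * push; an instruction that overwrites it reserved too little stack (a broken
 * sp_inc annotation), which rb_vm_check_canary reports below. */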
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;
    unsigned int pos = 0;

    while (pos < ISEQ_BODY(iseq)->iseq_size) {
        int opcode = rb_vm_insn_addr2opcode((void *)ISEQ_BODY(iseq)->iseq_encoded[pos]);
        unsigned int next_pos = pos + insn_len(opcode);
        if (ISEQ_BODY(iseq)->iseq_encoded + next_pos == pc) {

    rb_bug("failed to find the previous insn");
    if (! LIKELY(vm_stack_canary_was_born)) {

    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {

    else if (! (iseq = GET_ISEQ())) {

    else if (LIKELY(sp[0] != vm_stack_canary)) {
    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);
    const ptrdiff_t pos = previous_insn_index(iseq, GET_PC());
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

            "We are killing the stack canary set by %s, "
            "at %s@pc=%"PRIdPTR"\n"
            "watch out the C stack trace.\n"
            name, stri, pos, strd);
    rb_bug("see above.");
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)
#else
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
vm_push_frame_debug_counter_inc(

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

                RB_DEBUG_COUNTER_INC(frame_R2R);

                RB_DEBUG_COUNTER_INC(frame_R2C);

                RB_DEBUG_COUNTER_INC(frame_C2R);

                RB_DEBUG_COUNTER_INC(frame_C2C);
    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
#define vm_push_frame_debug_counter_inc(ec, cfp, t)

rb_vm_stack_canary(void)
{
    return vm_stack_canary;
}

STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS,   VM_ENV_DATA_INDEX_FLAGS   == -0);
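/* vm_push_frame fills the trailing env slots in the fixed order asserted
 * above: me/cref, then specval (block handler or previous EP), then the
 * frame type flags. */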
    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    for (int i=0; i < local_size; i++) {

#if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
#endif

    vm_push_frame_debug_counter_inc(ec, cfp, type);

    if (VMDEBUG == 2) SDR();
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
    rb_iseq_t *dmy_iseq = rb_iseq_alloc_with_dummy_path(fname);

                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH,
                  VM_BLOCK_HANDLER_NONE,

    return (VALUE)dmy_iseq;
rb_arity_error_new(int argc, int min, int max)
{
    VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);

        rb_str_catf(err_mess, "..%d", max);
rb_error_arity(int argc, int min, int max)
NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
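/* Env writes take a fast path while the env still lives on the VM stack;
 * once it escapes to the heap, VM_ENV_FLAG_WB_REQUIRED forces the slowpath
 * above so the GC write barrier is honored. */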
vm_env_write(const VALUE *ep, int index, VALUE v)
{
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
    if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
        VM_STACK_ENV_WRITE(ep, index, v);
    }
    else {
        vm_env_write_slowpath(ep, index, v);
    }
}

rb_vm_env_write(const VALUE *ep, int index, VALUE v)
{
    vm_env_write(ep, index, v);
}
    if (block_handler == VM_BLOCK_HANDLER_NONE) {

    switch (vm_block_handler_type(block_handler)) {
      case block_handler_type_iseq:
      case block_handler_type_ifunc:
        return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
      case block_handler_type_symbol:
        return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
      case block_handler_type_proc:
        return VM_BH_TO_PROC(block_handler);

    VM_UNREACHABLE(rb_vm_bh_to_procval);
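/* svar helpers: per-frame special variables ($_, $~) are stored in an
 * imemo_svar hanging off the local environment, or in ec->root_svar at
 * the top level. */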
vm_svar_valid_p(VALUE svar)
{
    switch (imemo_type(svar)) {

        rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
    const struct vm_svar *svar = lep_svar(ec, lep);

      case VM_SVAR_LASTLINE:
        return svar->lastline;
      case VM_SVAR_BACKREF:
        return svar->backref;

        const VALUE ary = svar->others;
    struct vm_svar *svar = IMEMO_NEW(struct vm_svar, imemo_svar, obj);

    struct vm_svar *svar = lep_svar(ec, lep);

        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
      case VM_SVAR_LASTLINE:

      case VM_SVAR_BACKREF:

        VALUE ary = svar->others;
        val = lep_svar_get(ec, lep, key);

        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

            rb_bug("unexpected back-ref");

    VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        return rb_reg_last_defined(backref);

        rb_bug("unexpected back-ref");

        nth = (int)(type >> 1);
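/* The ME/CREF env slot holds a method entry, a CREF, an svar wrapping one
 * of those, or Qfalse; the helpers below decode each case. */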
check_method_entry(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

        rb_bug("check_method_entry: svar should not be there:");
    const VALUE *ep = cfp->ep;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.iseqptr;

    switch (me->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        return me->def->body.iseq.cref;

#if VM_CHECK_MODE == 0
check_cref(VALUE obj, int can_be_svar)
{
    if (obj == Qfalse) return NULL;

    if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));

    switch (imemo_type(obj)) {

        rb_bug("check_method_entry: svar should not be there:");
vm_env_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
is_cref(const VALUE v, int can_be_svar)
{
    switch (imemo_type(v)) {

vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;

    switch (imemo_type(v)) {

            new_cref = vm_cref_dup(cref);

            VM_FORCE_WRITE(vptr, (VALUE)new_cref);

        return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);

        rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
vm_get_cref(const VALUE *ep)
{
        rb_bug("vm_get_cref: unreachable");
}

rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}

    return vm_get_cref(cfp->ep);
vm_get_const_key_cref(const VALUE *ep)
{
        if (RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            RCLASS_CLONED_P(CREF_CLASS(cref))) {

        cref = CREF_NEXT(cref);
    #define ADD_NEW_CREF(new_cref) \
        if (new_cref_tail) { \
            RB_OBJ_WRITE(new_cref_tail, &new_cref_tail->next, new_cref); \
        } \
        else { \
            new_cref_head = new_cref; \
        } \
        new_cref_tail = new_cref;

        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            ADD_NEW_CREF(new_cref);
            return new_cref_head;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        ADD_NEW_CREF(new_cref);

    return new_cref_head;
        prev_cref = vm_env_cref(ep);

            prev_cref = vm_env_cref(cfp->ep);

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}

vm_get_const_base(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

        if (!CREF_PUSHED_BY_EVAL(cref)) {
            return CREF_CLASS_FOR_DEFINITION(cref);
        }
        cref = CREF_NEXT(cref);
vm_check_if_namespace(VALUE klass)
{
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);

vm_ensure_not_refinement_module(VALUE self)
{
        rb_warn("not defined at the refinement, but at the outer class/module");
    if (NIL_P(orig_klass) && allow_nil) {

        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }

        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);

                        if (am == klass) break;

                    if (is_defined) return 1;
                    if (rb_autoloading_value(klass, id, &av, NULL)) return av;

                    goto search_continue;
                    if (UNLIKELY(!rb_ractor_main_p())) {

                        rb_raise(rb_eRactorIsolationError,
                                 "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.",
                                 rb_class_path(klass), rb_id2name(id));
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));

        vm_check_if_namespace(orig_klass);

            return rb_public_const_defined_from(orig_klass, id);

            return rb_public_const_get_from(orig_klass, id);

    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
    int allow_nil = TRUE;
    if (segments[0] == idNULL) {

    while (segments[idx]) {
        ID id = segments[idx++];
        val = vm_get_ev_const(ec, val, id, allow_nil, 0);
        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || RCLASS_SINGLETON_P(CREF_CLASS(cref)) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));

static void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)
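/* vm_getivar implements the shape-based inline cache: when the cached shape
 * id still matches the object's current shape, the cached attribute index is
 * valid and the read is a plain array load; otherwise the cache is refilled
 * further below. */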
        return default_value;

    shape_id_t shape_id = RBASIC_SHAPE_ID_FOR_READ(obj);

    if (UNLIKELY(!rb_ractor_main_p())) {

    if (default_value == Qundef) {

        fields_obj = RCLASS_WRITABLE_FIELDS_OBJ(obj);

            return default_value;

        ivar_list = rb_imemo_fields_ptr(fields_obj);
        shape_id = RBASIC_SHAPE_ID_FOR_READ(fields_obj);

        if (rb_obj_exivar_p(obj)) {
            VALUE fields_obj = 0;
            if (!rb_gen_fields_tbl_get(obj, id, &fields_obj)) {
                return default_value;
            }
            ivar_list = rb_imemo_fields_ptr(fields_obj);
        }
        else {
            return default_value;
        }

    shape_id_t cached_id;

        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);

        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);

    if (LIKELY(cached_id == shape_id)) {

        if (index == ATTR_INDEX_NOT_SET) {
            return default_value;
        }

        val = ivar_list[index];
#if USE_DEBUG_COUNTER
        RB_DEBUG_COUNTER_INC(ivar_get_ic_hit);

            RB_DEBUG_COUNTER_INC(ivar_get_obj_hit);
#endif

#if USE_DEBUG_COUNTER
        if (cached_id != INVALID_SHAPE_ID) {
            RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
        }
        else {
            RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
        }

        if (cached_id != INVALID_SHAPE_ID) {
            RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
        }
        else {
            RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
        }

        RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

        RB_DEBUG_COUNTER_INC(ivar_get_obj_miss);
#endif
    if (rb_shape_too_complex_p(shape_id)) {

            table = rb_imemo_fields_complex_tbl(fields_obj);

            table = ROBJECT_FIELDS_HASH(obj);

            if (rb_gen_fields_tbl_get(obj, 0, &fields_obj)) {
                table = rb_imemo_fields_complex_tbl(fields_obj);
            }

        if (!table || !st_lookup(table, id, &val)) {
            val = default_value;
        }

        shape_id_t previous_cached_id = cached_id;
        if (rb_shape_get_iv_index_with_hint(shape_id, id, &index, &cached_id)) {

            if (cached_id != previous_cached_id) {
                fill_ivar_cache(iseq, ic, cc, is_attr, index, cached_id);
            }

            if (index == ATTR_INDEX_NOT_SET) {
                val = default_value;
            }
            else {
                val = ivar_list[index];
            }

            if (is_attr) {
                vm_cc_attr_index_initialize(cc, shape_id);
            }
            else {
                vm_ic_attr_index_initialize(ic, shape_id);
            }

            val = default_value;

    if (!UNDEF_P(default_value)) {

    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    return rb_attr_get(obj, id);
populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
{
    RUBY_ASSERT(!rb_shape_too_complex_p(next_shape_id));

        vm_cc_attr_index_set(cc, index, next_shape_id);

        vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);

    rb_check_frozen(obj);

    attr_index_t index = rb_obj_ivar_set(obj, id, val);

    shape_id_t next_shape_id = RBASIC_SHAPE_ID(obj);

    if (!rb_shape_too_complex_p(next_shape_id)) {
        populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
    }

    RB_DEBUG_COUNTER_INC(ivar_set_obj_miss);

    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));

static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);

    VALUE fields_obj = 0;

    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
            RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));

    rb_gen_fields_tbl_get(obj, 0, &fields_obj);

    if (shape_id != dest_shape_id) {
        RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
    }

    RB_OBJ_WRITE(obj, &rb_imemo_fields_ptr(fields_obj)[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
        shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
        RUBY_ASSERT(dest_shape_id == INVALID_SHAPE_ID || !rb_shape_too_complex_p(dest_shape_id));

        if (LIKELY(shape_id == dest_shape_id)) {
            RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
        }
        else if (dest_shape_id != INVALID_SHAPE_ID) {
            if (RSHAPE_DIRECT_CHILD_P(shape_id, dest_shape_id) && RSHAPE_EDGE_NAME(dest_shape_id) == id && RSHAPE_CAPACITY(shape_id) == RSHAPE_CAPACITY(dest_shape_id)) {
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                RBASIC_SET_SHAPE_ID(obj, dest_shape_id);

                RUBY_ASSERT(index < RSHAPE_CAPACITY(dest_shape_id));

        RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

        RB_DEBUG_COUNTER_INC(ivar_set_obj_hit);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
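/* Class-variable caches are validated by cref identity and by a global
 * serial (GET_GLOBAL_CVAR_STATE) that is bumped by class-variable updates,
 * so a stale entry falls through to update_classvariable_cache. */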
    VALUE defined_class = 0;

        defined_class = RBASIC(defined_class)->klass;

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);

        rb_bug("the cvc table should be set");

    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);

    return vm_getclassvariable(iseq, cfp, id, ic);
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    update_classvariable_cache(iseq, klass, id, cref, ic);

    vm_setclassvariable(iseq, cfp, id, val, ic);

    return vm_getivar(obj, id, iseq, ic, NULL, FALSE, Qnil);
    shape_id_t dest_shape_id;

    vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);

    if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {

        if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {

        vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    vm_setinstancevariable(iseq, obj, id, val, ic);
        ec->tag->state = RUBY_TAG_FATAL;

        ec->tag->state = TAG_THROW;

    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);

        ec->tag->state = TAG_RAISE;
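/* vm_throw_start resolves break/retry/return to the control frame that
 * should catch the jump, raising LocalJumpError for orphaned ones. */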
               const int flag, const VALUE throwobj)
{
    else if (state == TAG_BREAK) {
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {

            ep = VM_ENV_PREV_EP(ep);

            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;

                    for (i=0; i < ct->size; i++) {
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }

        rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;

        escape_cfp = reg_cfp;

        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {

            ep = VM_ENV_PREV_EP(ep);
        }
        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {

                    if (in_class_frame) {

                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                if (tep == target_ep) {
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }

            else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                  case ISEQ_TYPE_MAIN:

                    if (in_class_frame) goto unexpected_return;
                    if (target_ep == NULL) {
                    }
                    else {
                        goto unexpected_return;
                    }

                  case ISEQ_TYPE_EVAL: {

                    enum rb_iseq_type t = ISEQ_BODY(is)->type;
                    while (t == ISEQ_TYPE_RESCUE || t == ISEQ_TYPE_ENSURE || t == ISEQ_TYPE_EVAL) {
                        if (!(is = ISEQ_BODY(is)->parent_iseq)) break;
                        t = ISEQ_BODY(is)->type;
                    }
                    toplevel = t == ISEQ_TYPE_TOP || t == ISEQ_TYPE_MAIN;

                  case ISEQ_TYPE_CLASS:

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }

        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);
        rb_bug("insns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);

static VALUE
vm_throw(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
         rb_num_t throw_state, VALUE throwobj)
{
    const int state = (int)(throw_state & VM_THROW_STATE_MASK);
    const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);

    if (state != 0) {
        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
    }
    else {
        return vm_throw_continue(ec, throwobj);
    }
}

    return vm_throw(ec, reg_cfp, throw_state, throwobj);
    int is_splat = flag & 0x01;

    const VALUE obj = ary;

    if (num + is_splat == 0) {

    else if (flag & 0x02) {

        for (i = 0; i < num - len; i++) {

        for (j = 0; i < num; i++, j++) {

        for (; i < num - len; i++) {

        for (rb_num_t j = 0; i < num; i++, j++) {
            *cfp->sp++ = ptr[len - j - 1];

        for (rb_num_t j = 0; j < num; j++) {
            *cfp->sp++ = ptr[num - j - 1];
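/* Call-cache section: per-class tables group call caches (ccs) by method
 * id; each entry is further keyed by the call site's argc and flags so
 * distinct call shapes get distinct caches. */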
    int initial_capa = 2;

#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;
#endif
    ccs->capa = initial_capa;

    rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    if (! vm_cc_markable(cc)) {

    if (UNLIKELY(ccs->len == ccs->capa)) {

        ccs = ruby_xrealloc(ccs, vm_ccs_alloc_size(ccs->capa));
#if VM_CHECK_MODE > 0
        ccs->debug_sig = ~(VALUE)ccs;
#endif
        rb_managed_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    ccs->entries[pos].argc = vm_ci_argc(ci);
    ccs->entries[pos].flag = vm_ci_flag(ci);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
#if VM_CHECK_MODE > 0

    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        ruby_debug_printf("CCS CI ID:flag:%x argc:%u\n",
                          ccs->entries[i].flag,
                          ccs->entries[i].argc);
        rp(ccs->entries[i].cc);
    }
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {

        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
        VM_ASSERT(!vm_cc_super_p(cc));
        VM_ASSERT(!vm_cc_refinement_p(cc));
    }
    ASSERT_vm_locking();

    if (rb_multi_ractor_p()) {
        if (RCLASS_WRITABLE_CC_TBL(klass) != cc_tbl) {

        rb_managed_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs);

        if (!ccs || !METHOD_ENTRY_INVALIDATED(ccs->cme)) {

        VALUE new_table = rb_vm_cc_table_dup(cc_tbl);
        rb_vm_cc_table_delete(new_table, mid);
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), new_table);

        rb_vm_cc_table_delete(cc_tbl, mid);
    ASSERT_vm_locking();

    VALUE cc_tbl = RCLASS_WRITABLE_CC_TBL(klass);
    const VALUE original_cc_table = cc_tbl;

        cc_tbl = rb_vm_cc_table_create(1);

    else if (rb_multi_ractor_p()) {
        cc_tbl = rb_vm_cc_table_dup(cc_tbl);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

        cme = rb_callable_method_entry(klass, mid);

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    if (!LIKELY(rb_managed_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs))) {
        ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
    }

    cme = rb_check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general, cc_type_normal);
    vm_ccs_push(cc_tbl, mid, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    if (original_cc_table != cc_tbl) {
        RB_OBJ_ATOMIC_WRITE(klass, &RCLASS_WRITABLE_CC_TBL(klass), cc_tbl);
    }
    cc_tbl = RUBY_ATOMIC_VALUE_LOAD(RCLASS_WRITABLE_CC_TBL(klass));

    if (rb_managed_id_table_lookup(cc_tbl, mid, (VALUE *)&ccs)) {
        const int ccs_len = ccs->len;

        if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
            vm_evict_cc(klass, cc_tbl, mid);
        }
        else {
            VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

            unsigned int argc = vm_ci_argc(ci);
            unsigned int flag = vm_ci_flag(ci);

            for (int i=0; i<ccs_len; i++) {
                unsigned int ccs_ci_argc = ccs->entries[i].argc;
                unsigned int ccs_ci_flag = ccs->entries[i].flag;
                const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                if (ccs_ci_argc == argc && ccs_ci_flag == flag) {
                    RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                    VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                    VM_ASSERT(ccs_cc->klass == klass);
                    VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));
    const ID mid = vm_ci_mid(ci);

    const struct rb_callcache *cc = vm_lookup_cc(klass, ci, mid);

        if (rb_multi_ractor_p()) {

            cc = vm_lookup_cc(klass, ci, mid);

        cc = vm_populate_cc(klass, ci, mid);

    cc = vm_search_cc(klass, ci);

    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
    VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
    VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
#if USE_DEBUG_COUNTER
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    if (cd_owner && cc != empty_cc) {

#if USE_DEBUG_COUNTER
    if (!old_cc || old_cc == empty_cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));
#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);

    RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);

    return vm_search_method_slowpath0(cd_owner, cd, klass);

    VM_ASSERT(klass != Qfalse);

    return vm_search_method_fastpath(cd_owner, cd, klass);
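/* With transparent_union, a cfunc pointer of any arity can be passed and
 * compared through one union type without explicit casts at every call. */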
#if __has_attribute(transparent_union)
    VALUE (*f10)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f11)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f12)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f13)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f14)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
    VALUE (*f15)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
# define make_cfunc_type(f) (cfunc_type){.anyargs = (VALUE (*)(ANYARGS))(f)}
#else
# define make_cfunc_type(f) (cfunc_type)(f)
    VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
    VM_ASSERT(callable_method_entry_p(me));

    if (me->def->type != VM_METHOD_TYPE_CFUNC) {

#if __has_attribute(transparent_union)
        return me->def->body.cfunc.func == func.anyargs;
#else
        return me->def->body.cfunc.func == func;
#endif

    return me && METHOD_ENTRY_BASIC(me);

    VM_ASSERT(iseq != NULL);

    return check_cfunc(vm_cc_cme(cc), func);

#define check_cfunc(me, func) check_cfunc(me, make_cfunc_type(func))
#define vm_method_cfunc_is(iseq, cd, recv, func) vm_method_cfunc_is(iseq, cd, recv, make_cfunc_type(func))

#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }

        goto compare_by_identity;

#if MSC_VERSION_BEFORE(1300)

        else if (isnan(b)) {

        return RBOOL(a == b);

        return rb_str_eql_internal(obj, recv);

  compare_by_identity:
    return RBOOL(recv == obj);
    VM_ASSERT(cd_owner != NULL);

    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {

    return RBOOL(recv == obj);

#undef EQ_UNREDEFINED_P
NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));

static VALUE
opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
{
    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, &VM_CI_ON_STACK(mid, 0, 1, NULL));

    if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
        return RBOOL(recv == obj);
    }
    VALUE val = opt_equality_specialized(recv, obj);
    if (!UNDEF_P(val)) {

    return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);
      case VM_CHECKMATCH_TYPE_WHEN:

      case VM_CHECKMATCH_TYPE_RESCUE:

            rb_raise(rb_eTypeError, "class or module required for rescue clause");

      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);

        rb_bug("check_match: unreachable");
#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#else
#define CHECK_CMP_NAN(a, b)
#endif

double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}

double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}

double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}

double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}
static inline VALUE *

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;

        if (ISEQ_BODY(cfp->iseq)->param.flags.forwardable && VM_ENV_LOCAL_P(cfp->ep)) {
            int lts = ISEQ_BODY(cfp->iseq)->local_table_size;
            int params = ISEQ_BODY(cfp->iseq)->param.size;

            bp += vm_ci_argc(ci);
        }

        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD || VM_FRAME_BMETHOD_P(cfp)) {

#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif

    return vm_base_ptr(cfp);
static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;

rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}

rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
{
    return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
           ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
           ISEQ_BODY(iseq)->param.flags.forwardable == FALSE &&
           ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
}
#define ALLOW_HEAP_ARGV (-2)
#define ALLOW_HEAP_ARGV_KEEP_KWSPLAT (-3)
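/* A max_args of ALLOW_HEAP_ARGV permits spilling a long splat into a heap
 * array (calling->heap_argv) instead of the VM stack; the KEEP_KWSPLAT
 * variant additionally preserves a trailing keyword-splat hash. */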
    vm_check_canary(GET_EC(), cfp->sp);

    int argc = calling->argc;

    if (UNLIKELY(max_args <= ALLOW_HEAP_ARGV && len + argc > VM_ARGC_STACK_MAX)) {

        VALUE *argv = cfp->sp - argc;

        cfp->sp -= argc - 1;
        cfp->sp[-1] = argv_ary;

        calling->heap_argv = argv_ary;

        if (max_args >= 0 && len + argc > max_args) {

            calling->argc += len - (max_args - argc + 1);
            len = max_args - argc + 1;

        calling->heap_argv = 0;

        CHECK_VM_STACK_OVERFLOW(cfp, len);

        for (i = 0; i < len; i++) {
            *cfp->sp++ = ptr[i];
        }
    const VALUE *const passed_keywords = vm_ci_kwarg(ci)->keywords;
    const int kw_len = vm_ci_kwarg(ci)->keyword_len;
    const VALUE h = rb_hash_new_with_size(kw_len);
    VALUE *sp = cfp->sp;

    for (i=0; i<kw_len; i++) {
        rb_hash_aset(h, passed_keywords[i], (sp - kw_len)[i]);
    }

    cfp->sp -= kw_len - 1;
    calling->argc -= kw_len - 1;
    calling->kw_splat = 1;
vm_caller_setup_keyword_hash(const struct rb_callinfo *ci, VALUE keyword_hash)
{
        if (keyword_hash != Qnil) {

            keyword_hash = rb_hash_dup(rb_to_hash_type(keyword_hash));

    else if (!IS_ARGS_KW_SPLAT_MUT(ci) && !RHASH_EMPTY_P(keyword_hash)) {

        keyword_hash = rb_hash_dup(keyword_hash);
    }
    return keyword_hash;
                         const struct rb_callinfo *restrict ci, int max_args)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        if (IS_ARGS_KW_SPLAT(ci)) {

            VM_ASSERT(calling->kw_splat == 1);

            VALUE ary = cfp->sp[0];
            VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[1]);

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) return;

            if (UNLIKELY(calling->heap_argv)) {

                ((struct RHash *)kwh)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
                if (max_args != ALLOW_HEAP_ARGV_KEEP_KWSPLAT) {
                    calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 1);

            calling->kw_splat = 0;

            VM_ASSERT(calling->kw_splat == 0);

            VALUE ary = cfp->sp[0];

            if (vm_caller_setup_arg_splat(cfp, calling, ary, max_args)) {

            VALUE last_hash, argv_ary;
            if (UNLIKELY(argv_ary = calling->heap_argv)) {
                if (!IS_ARGS_KEYWORD(ci) &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    calling->kw_splat = 1;

                if (!IS_ARGS_KEYWORD(ci) &&
                    calling->argc > 0 &&
                    (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {

                    cfp->sp[-1] = rb_hash_dup(last_hash);
                    calling->kw_splat = 1;
    }
    else if (UNLIKELY(IS_ARGS_KW_SPLAT(ci))) {

        VM_ASSERT(calling->kw_splat == 1);
        VALUE kwh = vm_caller_setup_keyword_hash(ci, cfp->sp[-1]);

            calling->kw_splat = 0;
    }
    else if (UNLIKELY(IS_ARGS_KEYWORD(ci))) {

        VM_ASSERT(calling->kw_splat == 0);

        vm_caller_setup_arg_kw(cfp, calling, ci);
    }
#define USE_OPT_HIST 0

#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

opt_hist_show_results_at_exit(void)
{
    for (int i=0; i<OPT_HIST_MAX; i++) {
        ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
    }
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
    const int param = ISEQ_BODY(iseq)->param.size;
    const int local = ISEQ_BODY(iseq)->local_table_size;
    const int delta = opt_num - opt;

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    const int opt = calling->argc - lead_num;
    const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];

    RB_DEBUG_COUNTER_INC(ccf_iseq_opt);

#if USE_OPT_HIST
    if (opt_pc < OPT_HIST_MAX) {
        opt_hist[opt]++;
    }
    else {
        opt_hist[OPT_HIST_MAX]++;
    }
#endif

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
                         VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                         VALUE *const locals);
    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    VM_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    cfp->sp[0] = (VALUE)calling->cd->ci;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param_size, local_size);
    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE *const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;

    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->cd->ci;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE *const argv = cfp->sp - calling->argc;
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    cfp->sp -= (calling->argc + 1);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    return builtin_invoker0(ec, calling->recv, NULL, func_ptr);
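/* warn_unused_block warns when a block is passed to a method whose iseq
 * never uses one; warnings are deduplicated per call site (or per method id
 * when strict warnings are off) via vm->unused_block_warning_table. */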
    set_table *dup_check_table = vm->unused_block_warning_table;

        .v = (VALUE)cme->def,

    if (!strict_unused_block) {
        key = (st_data_t)cme->def->original_id;

        if (set_table_lookup(dup_check_table, key)) {

            key |= (st_data_t)(k1.b[i] ^ k2.b[SIZEOF_VALUE-1-i]) << (8 * i);

    fprintf(stderr, "pc:%p def:%p\n", pc, (void *)cme->def);
    fprintf(stderr, "key:%p\n", (void *)key);

    if (set_insert(dup_check_table, key)) {

    VALUE name = rb_gen_method_name(cme->defined_class, ISEQ_BODY(iseq)->location.base_label);

    if (!NIL_P(m_loc)) {
        rb_warn("the block passed to '%"PRIsVALUE"' defined at %"PRIsVALUE":%"PRIsVALUE" may be ignored",

        rb_warn("the block may be ignored because '%"PRIsVALUE"' does not use a block", name);
                     const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    VM_ASSERT((vm_ci_argc(ci), 1));
    VM_ASSERT(vm_cc_cme(cc) != NULL);

    if (UNLIKELY(!ISEQ_BODY(iseq)->param.flags.use_block &&
                 calling->block_handler != VM_BLOCK_HANDLER_NONE &&
                 !(vm_ci_flag(calling->cd->ci) & (VM_CALL_OPT_SEND | VM_CALL_SUPER)))) {
        warn_unused_block(vm_cc_cme(cc), iseq, (void *)ec->cfp->pc);
    }
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        if (LIKELY(rb_simple_iseq_p(iseq))) {

            int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            CALLER_SETUP_ARG(cfp, calling, ci, lead_num);

            if (calling->argc != lead_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), calling->argc, lead_num, lead_num);
            }

            VM_ASSERT(cc == calling->cc);

            if (vm_call_iseq_optimizable_p(ci, cc)) {
                if ((iseq->body->builtin_attrs & BUILTIN_ATTR_SINGLE_NOARG_LEAF) &&

                    VM_ASSERT(iseq->body->builtin_attrs & BUILTIN_ATTR_LEAF);
                    vm_cc_bf_set(cc, (void *)iseq->body->iseq_encoded[1]);
                    CC_SET_FASTPATH(cc, vm_call_single_noarg_leaf_builtin, true);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), true);
            }
        }
        else if (rb_iseq_only_optparam_p(iseq)) {

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;

            CALLER_SETUP_ARG(cfp, calling, ci, lead_num + opt_num);
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, vm_cc_cme(cc), argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                vm_call_cacheable(ci, cc));
            }

            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {

            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE *const ci_keywords = kw_arg->keywords;
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    vm_call_cacheable(ci, cc));
                }
            }
            else if (argc == lead_num) {

                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, vm_cc_cme(cc), NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    vm_call_cacheable(ci, cc));
                }
    if (ISEQ_BODY(iseq)->param.flags.forwardable) {
        bool can_fastpath = true;

        if ((vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
            if (vm_ci_argc(ci) != vm_ci_argc(forward_cd->caller_ci)) {
                ci = vm_ci_new_runtime(

                ci = forward_cd->caller_ci;

                can_fastpath = false;

        if (!vm_ci_markable(ci)) {
            ci = vm_ci_new_runtime(

            can_fastpath = false;
        }
        argv[param_size - 1] = (VALUE)ci;
        CC_SET_FASTPATH(cc, vm_call_iseq_forwardable, can_fastpath);

    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
    const VALUE *lep = VM_CF_LEP(cfp);

    if (VM_ENV_FLAGS(lep, VM_ENV_FLAG_ESCAPED)) {

        iseq = rb_vm_search_cf_from_ep(ec, cfp, lep)->iseq;

    int local_size = ISEQ_BODY(iseq)->local_table_size + argc;

    const VALUE *from = lep - (local_size + VM_ENV_DATA_SIZE - 1);
    VALUE *to = cfp->sp - 1;

        CHECK_VM_STACK_OVERFLOW0(cfp, to, RARRAY_LEN(splat));

    CHECK_VM_STACK_OVERFLOW0(cfp, to, argc);

    cfp->sp = to + argc;
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(!ISEQ_BODY(iseq)->param.flags.forwardable);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

    RB_DEBUG_COUNTER_INC(ccf_iseq_setup);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    int param_size = ISEQ_BODY(iseq)->param.size;
    int local_size = ISEQ_BODY(iseq)->local_table_size;

    RUBY_ASSERT(ISEQ_BODY(iseq)->param.flags.forwardable);

    local_size = local_size + vm_ci_argc(calling->cd->ci);
    param_size = param_size + vm_ci_argc(calling->cd->ci);

    const int opt_pc = vm_callee_setup_arg(ec, calling, iseq, cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
                      int opt_pc, int param_size, int local_size)
{
    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
    }
    else {
        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
    }
                          int opt_pc, int param_size, int local_size)
{
    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *argv = cfp->sp - calling->argc;
    VALUE *sp = argv + param_size;
    cfp->sp = argv - 1;

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
                  calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  local_size - param_size,
                  ISEQ_BODY(iseq)->stack_max);
    VALUE *argv = cfp->sp - calling->argc;

    const rb_iseq_t *iseq = def_iseq_ptr(me->def);
    VALUE *src_argv = argv;
    VALUE *sp_orig, *sp;
    VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;

    if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
        struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
        const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
        dst_captured->code.val = src_captured->code.val;
        if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
            calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
        }
        else {
            calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
        }
    }

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;

    sp[0] = calling->recv;

    for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
        *sp++ = src_argv[i];
    }

    vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
                  calling->recv, calling->block_handler, (VALUE)me,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
                  ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
                  ISEQ_BODY(iseq)->stack_max);
ractor_unsafe_check(void)
{
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    }
}
    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(argc, argv, recv);

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(recv, argv[0]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2]);

    ractor_unsafe_check();
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3620 ractor_unsafe_check();
3621 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3622 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3628 ractor_unsafe_check();
3629 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3630 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3636 ractor_unsafe_check();
3637 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3638 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3644 ractor_unsafe_check();
3645 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3646 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
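/* The ractor-safe invokers that follow perform the same arity-specific
 * casts, just without the main-ractor check. */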
3660 return (*f)(argc, argv, recv);
3674 return (*f)(recv, argv[0]);
3681 return (*f)(recv, argv[0], argv[1]);
3688 return (*f)(recv, argv[0], argv[1], argv[2]);
3695 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3701 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3702 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3708 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3709 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3715 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3716 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3722 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3723 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3729 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3730 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3736 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3737 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3743 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3744 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3750 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3751 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3757 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3758 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3764 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3765 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3771 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3772 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3778 const int ov_flags = RAISED_STACKOVERFLOW;
3779 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3780 if (rb_ec_raised_p(ec, ov_flags)) {
3781 rb_ec_raised_reset(ec, ov_flags);
3787#define CHECK_CFP_CONSISTENCY(func) \
3788 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3789 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
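/* CHECK_CFP_CONSISTENCY is used right after a C function returns: a cfunc
 * must leave ec->cfp exactly one frame below the caller's reg_cfp, unless a
 * stack overflow was raised and already reset (see vm_cfp_consistent_p). */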
3795#if VM_DEBUG_VERIFY_METHOD_CACHE
3796 switch (me->def->type) {
3797 case VM_METHOD_TYPE_CFUNC:
3798 case VM_METHOD_TYPE_NOTIMPLEMENTED:
3800# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
3802 METHOD_BUG(ATTRSET);
3804 METHOD_BUG(BMETHOD);
3807 METHOD_BUG(OPTIMIZED);
3808 METHOD_BUG(MISSING);
3809 METHOD_BUG(REFINED);
3813 rb_bug("wrong method type: %d", me->def->type);
3816 return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
3823 RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
3830 VALUE recv = calling->recv;
3831 VALUE block_handler = calling->block_handler;
3832 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3834 if (UNLIKELY(calling->kw_splat)) {
3835 frame_type |= VM_FRAME_FLAG_CFRAME_KW;
3838 VM_ASSERT(reg_cfp == ec->cfp);
3840 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
3843 vm_push_frame(ec, NULL, frame_type, recv,
3844 block_handler, (VALUE)me,
3845 0, ec->cfp->sp, 0, 0);
3847 int len = cfunc->argc;
3850 reg_cfp->sp = stack_bottom;
3851 val = (*cfunc->invoker)(recv, argc, argv, cfunc->func);
3853 CHECK_CFP_CONSISTENCY("vm_call_cfunc");
3855 rb_vm_pop_frame(ec);
3857 VM_ASSERT(ec->cfp->sp == stack_bottom);
3859 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
3860 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);
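/* The helper below pushes a bare CFUNC frame for cme, taking the receiver
 * from recv_idx slots below the stack pointer. It is presumably used when a
 * C frame has to be materialized after the fact (e.g. by JIT-compiled code
 * that skipped pushing one). */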
3870 VM_ASSERT(cme->def->type == VM_METHOD_TYPE_CFUNC);
3872 VALUE *sp = ec->cfp->sp;
3873 VALUE recv = *(sp - recv_idx - 1);
3874 VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
3875 VALUE block_handler = VM_BLOCK_HANDLER_NONE;
3876#if VM_CHECK_MODE > 0
3878 *(GET_EC()->cfp->sp) = Qfalse;
3880 vm_push_frame(ec, NULL, frame_type, recv, block_handler, (VALUE)cme, 0, ec->cfp->sp, 0, 0);
3885rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
3887 return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
3893 int argc = calling->argc;
3894 VALUE *stack_bottom = reg_cfp->sp - argc - 1;
3895 VALUE *argv = &stack_bottom[1];
3897 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3904 RB_DEBUG_COUNTER_INC(ccf_cfunc_other);
3906 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
3908 if (UNLIKELY(argv_ary = calling->heap_argv)) {
3909 VM_ASSERT(!IS_ARGS_KEYWORD(ci));
3912 VALUE *stack_bottom = reg_cfp->sp - 2;
3914 VM_ASSERT(calling->argc == 1);
3918 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, argv, stack_bottom);
3921 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat && !(vm_ci_flag(ci) & VM_CALL_FORWARDING));
3923 return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
3930 VALUE argv_ary = reg_cfp->sp[-1 - stack_offset];
3933 if (UNLIKELY(argc > VM_ARGC_STACK_MAX)) {
3934 return vm_call_cfunc_other(ec, reg_cfp, calling);
3938 calling->kw_splat = 0;
3940 VALUE *stack_bottom = reg_cfp->sp - 2 - stack_offset;
3941 VALUE *sp = stack_bottom;
3942 CHECK_VM_STACK_OVERFLOW(reg_cfp, argc);
3943 for(i = 0; i < argc; i++) {
3948 return vm_call_cfunc_with_frame_(ec, reg_cfp, calling, argc, stack_bottom+1, stack_bottom);
3954 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat);
3955 VALUE argv_ary = reg_cfp->sp[-1];
3959 int argc_offset = 0;
3961 if (UNLIKELY(argc > 0 &&
3963 (((struct RHash *)last_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS))) {
3965 return vm_call_cfunc_other(ec, reg_cfp, calling);
3969 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 0, argc_offset);
3975 RB_DEBUG_COUNTER_INC(ccf_cfunc_only_splat_kw);
3976 VALUE keyword_hash = reg_cfp->sp[-1];
3979 return vm_call_cfunc_array_argv(ec, reg_cfp, calling, 1, 0);
3982 return vm_call_cfunc_other(ec, reg_cfp, calling);
3989 RB_DEBUG_COUNTER_INC(ccf_cfunc);
3991 if (IS_ARGS_SPLAT(ci) && !(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
3992 if (!IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 1) {
3994 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat, TRUE);
3995 return vm_call_cfunc_only_splat(ec, reg_cfp, calling);
3997 if (IS_ARGS_KW_SPLAT(ci) && vm_ci_argc(ci) == 2) {
3999 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_only_splat_kw, TRUE);
4000 return vm_call_cfunc_only_splat_kw(ec, reg_cfp, calling);
4004 CC_SET_FASTPATH(calling->cc, vm_call_cfunc_other, TRUE);
4005 return vm_call_cfunc_other(ec, reg_cfp, calling);
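/* Attribute fastpaths: vm_call_ivar serves attr_reader by fetching the
 * instance variable directly, and vm_call_attrset serves attr_writer by
 * storing it, using the shape id and index cached in the call cache. */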
4012 RB_DEBUG_COUNTER_INC(ccf_ivar);
4014 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE, Qnil);
4021 RB_DEBUG_COUNTER_INC(ccf_attrset);
4022 VALUE val = *(cfp->sp - 1);
4025 shape_id_t dest_shape_id;
4026 vm_cc_atomic_shape_and_index(cc, &dest_shape_id, &index);
4027 ID id = vm_cc_cme(cc)->def->body.attr.id;
4028 rb_check_frozen(obj);
4029 VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
4038 res = vm_setivar_default(obj, id, val, dest_shape_id, index);
4039 if (!UNDEF_P(res)) {
4044 res = vm_setivar_slowpath_attr(obj, id, val, cc);
4052 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
4062 VALUE procv = cme->def->body.bmethod.proc;
4065 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4066 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
4070 GetProcPtr(procv, proc);
4071 val = vm_invoke_bmethod(ec, proc, calling->recv, CALLING_ARGC(calling), argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));
4081 RB_DEBUG_COUNTER_INC(ccf_iseq_bmethod);
4085 VALUE procv = cme->def->body.bmethod.proc;
4088 cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
4089 rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
4093 GetProcPtr(procv, proc);
4094 const struct rb_block *block = &proc->block;
4096 while (vm_block_type(block) == block_type_proc) {
4097 block = vm_proc_block(block->as.proc);
4099 VM_ASSERT(vm_block_type(block) == block_type_iseq);
4102 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
4103 VALUE *const argv = cfp->sp - calling->argc;
4104 const int arg_size = ISEQ_BODY(iseq)->param.size;
4107 if (vm_ci_flag(calling->cd->ci) & VM_CALL_ARGS_SIMPLE) {
4108 opt_pc = vm_callee_setup_block_arg(ec, calling, calling->cd->ci, iseq, argv, arg_setup_method);
4111 opt_pc = setup_parameters_complex(ec, iseq, calling, calling->cd->ci, argv, arg_setup_method);
4116 vm_push_frame(ec, iseq,
4117 VM_FRAME_MAGIC_BLOCK | VM_FRAME_FLAG_BMETHOD | VM_FRAME_FLAG_LAMBDA,
4119 VM_GUARDED_PREV_EP(captured->ep),
4121 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
4123 ISEQ_BODY(iseq)->local_table_size - arg_size,
4124 ISEQ_BODY(iseq)->stack_max);
4132 RB_DEBUG_COUNTER_INC(ccf_noniseq_bmethod);
4136 CALLER_SETUP_ARG(cfp, calling, calling->cd->ci, ALLOW_HEAP_ARGV);
4137 if (UNLIKELY(calling->heap_argv)) {
4142 argc = calling->argc;
4145 cfp->sp += - argc - 1;
4148 return vm_call_bmethod_body(ec, calling, argv);
4154 RB_DEBUG_COUNTER_INC(ccf_bmethod);
4158 VALUE procv = cme->def->body.bmethod.proc;
4160 GetProcPtr(procv, proc);
4161 const struct rb_block *block = &proc->block;
4163 while (vm_block_type(block) == block_type_proc) {
4164 block = vm_proc_block(block->as.proc);
4166 if (vm_block_type(block) == block_type_iseq) {
4167 CC_SET_FASTPATH(cc, vm_call_iseq_bmethod, TRUE);
4168 return vm_call_iseq_bmethod(ec, cfp, calling);
4171 CC_SET_FASTPATH(cc, vm_call_noniseq_bmethod, TRUE);
4172 return vm_call_noniseq_bmethod(ec, cfp, calling);
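/* rb_find_defined_class_by_owner walks up from current_class looking for
 * the ancestor whose origin owns the target method; the alias resolution
 * below uses it to recompute defined_class for complemented entries. */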
4176rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
4178 VALUE klass = current_class;
4186 while (RTEST(klass)) {
4188 if (owner == target_owner) {
4194 return current_class;
4203 if (orig_me->defined_class == 0) {
4204 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
4205 VM_ASSERT_TYPE(orig_me->owner, T_MODULE);
4206 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
4208 if (me->def->reference_count == 1) {
4209 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
4213 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
4221 VM_ASSERT(callable_method_entry_p(cme));
4228 return aliased_callable_method_entry(me);
4234 calling->cc = &VM_CC_ON_STACK(Qundef,
4237 aliased_callable_method_entry(vm_cc_cme(calling->cc)));
4239 return vm_call_method_each_type(ec, cfp, calling);
4242static enum method_missing_reason
4245 enum method_missing_reason stat = MISSING_NOENTRY;
4246 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4247 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
4248 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
4258 ASSUME(calling->argc >= 0);
4260 enum method_missing_reason missing_reason = MISSING_NOENTRY;
4261 int argc = calling->argc;
4262 VALUE recv = calling->recv;
4265 flags |= VM_CALL_OPT_SEND;
4267 if (UNLIKELY(! mid)) {
4268 mid = idMethodMissing;
4269 missing_reason = ci_missing_reason(ci);
4270 ec->method_missing_reason = missing_reason;
4273 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4274 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4275 rb_ary_unshift(argv_ary, symbol);
4278 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4279 VALUE exc = rb_make_no_method_exception(
4301 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4304 argc = ++calling->argc;
4306 if (rb_method_basic_definition_p(klass, idMethodMissing)) {
4309 int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
4310 const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
4311 VALUE exc = rb_make_no_method_exception(
4324 .ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci)),
4330 if (!(vm_ci_flag(ci) & VM_CALL_FORWARDING)) {
4331 calling->cd = &new_fcd.cd;
4335 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4336 new_fcd.caller_ci = caller_ci;
4339 calling->cc = &VM_CC_ON_STACK(klass,
4341 { .method_missing_reason = missing_reason },
4342 rb_callable_method_entry_with_refinements(klass, mid, NULL));
4344 if (flags & VM_CALL_FCALL) {
4345 return vm_call_method(ec, reg_cfp, calling);
4349 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4351 if (vm_cc_cme(cc) != NULL) {
4352 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4353 case METHOD_VISI_PUBLIC:
4354 return vm_call_method_each_type(ec, reg_cfp, calling);
4355 case METHOD_VISI_PRIVATE:
4356 vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
4358 case METHOD_VISI_PROTECTED:
4359 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4362 VM_UNREACHABLE(vm_call_method);
4364 return vm_call_method_missing(ec, reg_cfp, calling);
4367 return vm_call_method_nome(ec, reg_cfp, calling);
4377 i = calling->argc - 1;
4379 if (calling->argc == 0) {
4380 rb_raise(rb_eArgError, "no method name given");
4404 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4410 RB_DEBUG_COUNTER_INC(ccf_opt_send_complex);
4412 int flags = VM_CALL_FCALL;
4416 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
4417 if (UNLIKELY(argv_ary = calling->heap_argv)) {
4419 flags |= VM_CALL_ARGS_SPLAT;
4420 if (calling->kw_splat) {
4421 VALUE last_hash = rb_ary_last(0, NULL, argv_ary);
4422 ((struct RHash *)last_hash)->basic.flags |= RHASH_PASS_AS_KEYWORDS;
4423 calling->kw_splat = 0;
4425 return vm_call_symbol(ec, reg_cfp, calling, ci, sym, flags);
4428 if (calling->kw_splat) flags |= VM_CALL_KW_SPLAT;
4429 return vm_call_opt_send0(ec, reg_cfp, calling, flags);
4435 RB_DEBUG_COUNTER_INC(ccf_opt_send_simple);
4436 return vm_call_opt_send0(ec, reg_cfp, calling, vm_ci_flag(calling->cd->ci) | VM_CALL_FCALL);
4442 RB_DEBUG_COUNTER_INC(ccf_opt_send);
4445 int flags = vm_ci_flag(ci);
4447 if (UNLIKELY((flags & VM_CALL_FORWARDING) || (!(flags & VM_CALL_ARGS_SIMPLE) &&
4448 ((calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
4449 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
4450 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc)))))) {
4451 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_complex, TRUE);
4452 return vm_call_opt_send_complex(ec, reg_cfp, calling);
4455 CC_SET_FASTPATH(calling->cc, vm_call_opt_send_simple, TRUE);
4456 return vm_call_opt_send_simple(ec, reg_cfp, calling);
4461 const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
4463 RB_DEBUG_COUNTER_INC(ccf_method_missing);
4465 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4466 unsigned int argc, flag;
4468 flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | vm_ci_flag(orig_ci);
4469 argc = ++calling->argc;
4472 CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
4473 vm_check_canary(ec, reg_cfp->sp);
4477 argv[0] = ID2SYM(vm_ci_mid(orig_ci));
4480 ec->method_missing_reason = reason;
4484 .ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci)),
4490 if (!(flag & VM_CALL_FORWARDING)) {
4491 calling->cd = &new_fcd.cd;
4495 VM_ASSERT((vm_ci_argc(caller_ci), 1));
4496 new_fcd.caller_ci = caller_ci;
4500 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
4501 rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
4502 return vm_call_method(ec, reg_cfp, calling);
4508 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->cd->ci, vm_cc_cmethod_missing_reason(calling->cc));
4519 return vm_call_method_nome(ec, cfp, calling);
4521 if (cme->def->type == VM_METHOD_TYPE_REFINED &&
4522 cme->def->body.refined.orig_me) {
4523 cme = refined_method_callable_without_refinement(cme);
4526 calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);
4528 return vm_call_method_each_type(ec, cfp, calling);
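/* find_refinement looks up the refinement module for klass in a cref's
 * refinements hash; the refined-call path below uses it to decide whether
 * dispatch should go to the refinement or to the original method. */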
4532find_refinement(VALUE refinements, VALUE klass)
4534 if (NIL_P(refinements)) {
4537 return rb_hash_lookup(refinements, klass);
4546 if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
4547 const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;
4550 cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
4551 if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
4555 } while (cfp->iseq != local_iseq);
4566 if (orig_me->defined_class == 0) {
4574 VM_ASSERT(callable_method_entry_p(cme));
4576 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
4586 ID mid = vm_ci_mid(calling->cd->ci);
4587 const rb_cref_t *cref = vm_get_cref(cfp->ep);
4591 for (; cref; cref = CREF_NEXT(cref)) {
4592 const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
4593 if (NIL_P(refinement)) continue;
4596 rb_callable_method_entry(refinement, mid);
4599 if (vm_cc_call(cc) == vm_call_super_method) {
4602 if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
4607 if (cme->def->type != VM_METHOD_TYPE_REFINED ||
4608 cme->def != ref_me->def) {
4611 if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
4620 if (vm_cc_cme(cc)->def->body.refined.orig_me) {
4621 return refined_method_callable_without_refinement(vm_cc_cme(cc));
4636 if (calling->cd->cc) {
4637 const struct rb_callcache *cc = calling->cc = vm_cc_new(vm_cc_cme(calling->cc)->defined_class, ref_cme, vm_call_general, cc_type_refinement);
4639 return vm_call_method(ec, cfp, calling);
4642 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, ref_cme);
4643 calling->cc = ref_cc;
4644 return vm_call_method(ec, cfp, calling);
4648 return vm_call_method_nome(ec, cfp, calling);
4654NOINLINE(static VALUE
4662 int argc = calling->argc;
4665 if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
4668 return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
4674 RB_DEBUG_COUNTER_INC(ccf_opt_call);
4677 VALUE procval = calling->recv;
4678 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
4684 RB_DEBUG_COUNTER_INC(ccf_opt_block_call);
4686 VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
4689 if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
4690 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
4693 calling->recv = rb_vm_bh_to_procval(ec, block_handler);
4694 calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
4695 return vm_call_general(ec, reg_cfp, calling);
4702 VALUE recv = calling->recv;
4705 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4706 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);
4708 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4709 return internal_RSTRUCT_GET(recv, off);
4715 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
4717 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
4725 VALUE recv = calling->recv;
4728 VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
4729 VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);
4731 rb_check_frozen(recv);
4733 const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
4734 internal_RSTRUCT_SET(recv, off, val);
4742 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
4744 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
4752#define VM_CALL_METHOD_ATTR(var, func, nohook) \
4753 if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
4754 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
4755 vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
4757 EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
4758 vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
4769 switch (vm_cc_cme(cc)->def->body.optimized.type) {
4770 case OPTIMIZED_METHOD_TYPE_SEND:
4771 CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
4772 return vm_call_opt_send(ec, cfp, calling);
4773 case OPTIMIZED_METHOD_TYPE_CALL:
4774 CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
4775 return vm_call_opt_call(ec, cfp, calling);
4776 case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
4777 CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
4778 return vm_call_opt_block_call(ec, cfp, calling);
4779 case OPTIMIZED_METHOD_TYPE_STRUCT_AREF: {
4780 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4784 VM_CALL_METHOD_ATTR(v,
4785 vm_call_opt_struct_aref(ec, cfp, calling),
4786 set_vm_cc_ivar(cc); \
4787 CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4790 case OPTIMIZED_METHOD_TYPE_STRUCT_ASET: {
4791 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4795 VM_CALL_METHOD_ATTR(v,
4796 vm_call_opt_struct_aset(ec, cfp, calling),
4797 set_vm_cc_ivar(cc); \
4798 CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE)))
4802 rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
4814 VM_ASSERT(! METHOD_ENTRY_INVALIDATED(cme));
4816 switch (cme->def->type) {
4817 case VM_METHOD_TYPE_ISEQ:
4818 if (ISEQ_BODY(def_iseq_ptr(cme->def))->param.flags.forwardable) {
4819 CC_SET_FASTPATH(cc, vm_call_iseq_fwd_setup, TRUE);
4820 return vm_call_iseq_fwd_setup(ec, cfp, calling);
4823 CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
4824 return vm_call_iseq_setup(ec, cfp, calling);
4827 case VM_METHOD_TYPE_NOTIMPLEMENTED:
4828 case VM_METHOD_TYPE_CFUNC:
4829 CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
4830 return vm_call_cfunc(ec, cfp, calling);
4832 case VM_METHOD_TYPE_ATTRSET:
4833 CALLER_SETUP_ARG(cfp, calling, ci, 1);
4837 const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG | VM_CALL_FORWARDING);
4839 if (vm_cc_markable(cc)) {
4840 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4841 VM_CALL_METHOD_ATTR(v,
4842 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4843 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4849 VM_CALLCACHE_UNMARKABLE |
4850 VM_CALLCACHE_ON_STACK,
4856 .value = vm_pack_shape_and_index(INVALID_SHAPE_ID, ATTR_INDEX_NOT_SET),
4861 VM_CALL_METHOD_ATTR(v,
4862 vm_call_attrset_direct(ec, cfp, cc, calling->recv),
4863 CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
4867 case VM_METHOD_TYPE_IVAR:
4868 CALLER_SETUP_ARG(cfp, calling, ci, 0);
4870 vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
4871 const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_FORWARDING);
4872 VM_CALL_METHOD_ATTR(v,
4873 vm_call_ivar(ec, cfp, calling),
4874 CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
4877 case VM_METHOD_TYPE_MISSING:
4878 vm_cc_method_missing_reason_set(cc, 0);
4879 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4880 return vm_call_method_missing(ec, cfp, calling);
4882 case VM_METHOD_TYPE_BMETHOD:
4883 CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
4884 return vm_call_bmethod(ec, cfp, calling);
4886 case VM_METHOD_TYPE_ALIAS:
4887 CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
4888 return vm_call_alias(ec, cfp, calling);
4890 case VM_METHOD_TYPE_OPTIMIZED:
4891 return vm_call_optimized(ec, cfp, calling, ci, cc);
4893 case VM_METHOD_TYPE_UNDEF:
4896 case VM_METHOD_TYPE_ZSUPER:
4897 return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));
4899 case VM_METHOD_TYPE_REFINED:
4902 return vm_call_refined(ec, cfp, calling);
4905 rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
4915 const int stat = ci_missing_reason(ci);
4917 if (vm_ci_mid(ci) == idMethodMissing) {
4918 if (UNLIKELY(calling->heap_argv)) {
4923 VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
4924 vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
4928 return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
4940 VALUE defined_class = me->defined_class;
4941 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4942 return NIL_P(refined_class) ? defined_class : refined_class;
4951 VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
4953 if (vm_cc_cme(cc) != NULL) {
4954 switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
4955 case METHOD_VISI_PUBLIC:
4956 return vm_call_method_each_type(ec, cfp, calling);
4958 case METHOD_VISI_PRIVATE:
4959 if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
4960 enum method_missing_reason stat = MISSING_PRIVATE;
4961 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
4963 vm_cc_method_missing_reason_set(cc, stat);
4964 CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
4965 return vm_call_method_missing(ec, cfp, calling);
4967 return vm_call_method_each_type(ec, cfp, calling);
4969 case METHOD_VISI_PROTECTED:
4970 if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
4971 VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
4973 vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
4974 return vm_call_method_missing(ec, cfp, calling);
4978 VM_ASSERT(vm_cc_cme(cc) != NULL);
4981 calling->cc = &cc_on_stack;
4982 return vm_call_method_each_type(ec, cfp, calling);
4985 return vm_call_method_each_type(ec, cfp, calling);
4988 rb_bug("unreachable");
4992 return vm_call_method_nome(ec, cfp, calling);
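/* vm_call_general is the fully generic entry point: it simply redoes the
 * vm_call_method dispatch. vm_call_super_method below does the same after
 * asserting that the call cache was set up for super dispatch. */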
4999 RB_DEBUG_COUNTER_INC(ccf_general);
5000 return vm_call_method(ec, reg_cfp, calling);
5006 VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
5007 VM_ASSERT(cc != vm_cc_empty());
5009 *(vm_call_handler *)&cc->call_ = vm_call_general;
5015 RB_DEBUG_COUNTER_INC(ccf_super_method);
5020 if (ec == NULL) rb_bug("unreachable");
5023 VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
5024 return vm_call_method(ec, reg_cfp, calling);
5030vm_search_normal_superclass(VALUE klass)
5035 klass = RBASIC(klass)->klass;
5037 klass = RCLASS_ORIGIN(klass);
5041NORETURN(static void vm_super_outside(void));
5044vm_super_outside(void)
5050empty_cc_for_super(void)
5052 return &vm_empty_cc_for_super;
5058 VALUE current_defined_class;
5065 current_defined_class = vm_defined_class_for_protected_call(me);
5068 reg_cfp->iseq != method_entry_iseqptr(me) &&
5071 RCLASS_INCLUDER(current_defined_class) : current_defined_class;
5075 "self has wrong type to call super in this context: "
5076 "%"PRIsVALUE
" (expected %"PRIsVALUE
")",
5081 if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
5083 "implicit argument passing of super from method defined"
5084 " by define_method() is not supported."
5085 " Specify all arguments explicitly.");
5088 ID mid = me->def->original_id;
5090 if (!vm_ci_markable(cd->ci)) {
5091 VM_FORCE_WRITE((const VALUE *)&cd->ci->mid, (VALUE)mid);
5095 cd->ci = vm_ci_new_runtime(mid,
5098 vm_ci_kwarg(cd->ci));
5105 VALUE klass = vm_search_normal_superclass(me->defined_class);
5109 cc = vm_cc_new(klass, NULL, vm_call_method_missing, cc_type_super);
5113 cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
5117 if (cached_cme == NULL) {
5119 cd->cc = empty_cc_for_super();
5121 else if (cached_cme->called_id != mid) {
5124 cc = vm_cc_new(klass, cme, vm_call_super_method, cc_type_super);
5128 cd->cc = cc = empty_cc_for_super();
5132 switch (cached_cme->def->type) {
5134 case VM_METHOD_TYPE_REFINED:
5136 case VM_METHOD_TYPE_ATTRSET:
5137 case VM_METHOD_TYPE_IVAR:
5138 vm_cc_call_set(cc, vm_call_super_method);
5146 VM_ASSERT((vm_cc_cme(cc), true));
5154block_proc_is_lambda(const VALUE procval)
5159 GetProcPtr(procval, proc);
5160 return proc->is_lambda;
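/* block_proc_namespace reports which namespace the Proc carries, if any;
 * callers appear to use it to tag block frames that need a namespace
 * switch (see the VM_FRAME_FLAG_NS_SWITCH handling further down). */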
5168block_proc_namespace(const VALUE procval)
5173 GetProcPtr(procval, proc);
5184 VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
5187 int is_lambda = FALSE;
5188 VALUE val, arg, blockarg;
5190 const struct vm_ifunc *ifunc = captured->code.ifunc;
5195 else if (argc == 0) {
5202 blockarg = rb_vm_bh_to_procval(ec, block_handler);
5204 frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
5206 frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
5209 vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
5212 VM_GUARDED_PREV_EP(captured->ep),
5214 0, ec->cfp->sp, 0, 0);
5215 val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
5216 rb_vm_pop_frame(ec);
5224 return vm_yield_with_cfunc(ec, captured, captured->self, argc, argv, 0, VM_BLOCK_HANDLER_NONE, NULL);
5230 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
5239 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5241 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
5249vm_callee_setup_block_arg_arg0_check(VALUE *argv)
5251 VALUE ary, arg0 = argv[0];
5256 VM_ASSERT(argv[0] == arg0);
5264 if (rb_simple_iseq_p(iseq)) {
5268 CALLER_SETUP_ARG(cfp, calling, ci, ISEQ_BODY(iseq)->param.lead_num);
5270 if (arg_setup_type == arg_setup_block &&
5271 calling->argc == 1 &&
5272 ISEQ_BODY(iseq)->param.flags.has_lead &&
5273 !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
5274 !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
5275 calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
5278 if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
5279 if (arg_setup_type == arg_setup_block) {
5280 if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
5282 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
5283 for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
5284 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5286 else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
5287 calling->argc = ISEQ_BODY(iseq)->param.lead_num;
5291 argument_arity_error(ec, iseq, NULL, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
5298 return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
5307 calling = &calling_entry;
5308 calling->argc = argc;
5309 calling->block_handler = block_handler;
5310 calling->kw_splat = (flags & VM_CALL_KW_SPLAT) ? 1 : 0;
5312 calling->heap_argv = 0;
5314 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, flags, 0, 0);
5316 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
5324 bool is_lambda, VALUE block_handler)
5327 const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
5328 const int arg_size = ISEQ_BODY(iseq)->param.size;
5329 VALUE *const rsp = GET_SP() - calling->argc;
5330 VALUE *const argv = rsp;
5331 int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, argv, is_lambda ? arg_setup_method : arg_setup_block);
5332 int frame_flag = VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0);
5336 if (calling->proc_ns) {
5337 frame_flag |= VM_FRAME_FLAG_NS_SWITCH;
5340 vm_push_frame(ec, iseq,
5343 VM_GUARDED_PREV_EP(captured->ep), 0,
5344 ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
5346 ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);
5354 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5356 VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
5357 int flags = vm_ci_flag(ci);
5359 if (UNLIKELY(!(flags & VM_CALL_ARGS_SIMPLE) &&
5360 ((calling->argc == 0) ||
5361 (calling->argc == 1 && (flags & (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT))) ||
5362 (calling->argc == 2 && (flags & VM_CALL_ARGS_SPLAT) && (flags & VM_CALL_KW_SPLAT)) ||
5363 ((flags & VM_CALL_KWARG) && (vm_ci_kwarg(ci)->keyword_len == calling->argc))))) {
5364 CALLER_SETUP_ARG(reg_cfp, calling, ci, ALLOW_HEAP_ARGV);
5366 if (UNLIKELY(calling->heap_argv)) {
5367#if VM_ARGC_STACK_MAX < 0
5369 rb_raise(rb_eArgError, "no receiver given");
5375 reg_cfp->sp[-1] = reg_cfp->sp[-2];
5376 reg_cfp->sp[-2] = calling->recv;
5377 flags |= VM_CALL_ARGS_SPLAT;
5380 if (calling->argc < 1) {
5381 rb_raise(rb_eArgError, "no receiver given");
5383 calling->recv = TOPN(--calling->argc);
5385 if (calling->kw_splat) {
5386 flags |= VM_CALL_KW_SPLAT;
5390 if (calling->argc < 1) {
5391 rb_raise(rb_eArgError, "no receiver given");
5393 calling->recv = TOPN(--calling->argc);
5396 return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, flags);
5402 MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
5407 CALLER_SETUP_ARG(ec->cfp, calling, ci, ALLOW_HEAP_ARGV_KEEP_KWSPLAT);
5408 argc = calling->argc;
5409 val = vm_yield_with_cfunc(ec, captured, captured->self, CALLING_ARGC(calling), calling->heap_argv ? RARRAY_CONST_PTR(calling->heap_argv) : STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
5415vm_proc_to_block_handler(VALUE procval)
5417 const struct rb_block *block = vm_proc_block(procval);
5419 switch (vm_block_type(block)) {
5420 case block_type_iseq:
5421 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
5422 case block_type_ifunc:
5423 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
5424 case block_type_symbol:
5425 return VM_BH_FROM_SYMBOL(block->as.symbol);
5426 case block_type_proc:
5427 return VM_BH_FROM_PROC(block->as.proc);
5429 VM_UNREACHABLE(vm_yield_with_proc);
5436 bool is_lambda, VALUE block_handler)
5438 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
5439 VALUE proc = VM_BH_TO_PROC(block_handler);
5440 if (!calling->proc_ns) {
5441 calling->proc_ns = block_proc_namespace(proc);
5443 is_lambda = block_proc_is_lambda(proc);
5444 block_handler = vm_proc_to_block_handler(proc);
5447 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
5453 bool is_lambda, VALUE block_handler)
5457 bool is_lambda, VALUE block_handler);
5459 switch (vm_block_handler_type(block_handler)) {
5460 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
5461 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
5462 case block_handler_type_proc: func = vm_invoke_proc_block; break;
5463 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
5464 default: rb_bug("vm_invoke_block: unreachable");
5467 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
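/* vm_make_proc_with_iseq materializes a Proc from a block iseq captured on
 * the current control frame. */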
5471vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
5478 rb_bug("vm_make_proc_with_iseq: unreachable");
5481 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
5482 captured->code.iseq = blockiseq;
5484 return rb_vm_make_proc(ec, captured, rb_cProc);
5488vm_once_exec(VALUE iseq)
5495vm_once_clear(VALUE data)
5498 is->once.running_thread = NULL;
5510 args[0] = obj; args[1] = Qfalse;
5512 if (!UNDEF_P(r) && RTEST(r)) {
5524 enum defined_type type = (enum defined_type)op_type;
5531 return rb_gvar_defined(SYM2ID(obj));
5533 case DEFINED_CVAR: {
5534 const rb_cref_t *cref = vm_get_cref(GET_EP());
5535 klass = vm_get_cvar_base(cref, GET_CFP(), 0);
5540 case DEFINED_CONST_FROM: {
5541 bool allow_nil = type == DEFINED_CONST;
5543 return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
5548 return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
5550 case DEFINED_METHOD:{
5555 switch (METHOD_ENTRY_VISI(me)) {
5556 case METHOD_VISI_PRIVATE:
5558 case METHOD_VISI_PROTECTED:
5562 case METHOD_VISI_PUBLIC:
5566 rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
5570 return check_respond_to_missing(obj, v);
5575 if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
5579 case DEFINED_ZSUPER:
5584 VALUE klass = vm_search_normal_superclass(me->defined_class);
5585 if (!klass) return false;
5587 ID id = me->def->original_id;
5594 return RTEST(vm_backref_defined(ec, GET_LEP(), FIX2INT(obj)));
5596 rb_bug("unimplemented defined? type (VM)");
5606 return vm_defined(ec, reg_cfp, op_type, obj, v);
5610vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
5613 const VALUE *ep = reg_ep;
5614 for (i = 0; i < lv; i++) {
5615 ep = GET_PREV_EP(ep);
5621vm_get_special_object(const VALUE *const reg_ep, enum vm_special_object_type type)
5625 case VM_SPECIAL_OBJECT_VMCORE:
5626 return rb_mRubyVMFrozenCore;
5627 case VM_SPECIAL_OBJECT_CBASE:
5628 return vm_get_cbase(reg_ep);
5629 case VM_SPECIAL_OBJECT_CONST_BASE:
5630 return vm_get_const_base(reg_ep);
5632 rb_bug("putspecialobject insn: unknown value_type %d", type);
5639rb_vm_get_special_object(const VALUE *reg_ep, enum vm_special_object_type type)
5641 return vm_get_special_object(reg_ep, type);
5647 const VALUE ary2 = ary2st;
5648 VALUE tmp1 = rb_check_to_array(ary1);
5649 VALUE tmp2 = rb_check_to_array(ary2);
5670 const VALUE ary2 = ary2st;
5672 if (NIL_P(ary2)) return ary1;
5674 VALUE tmp2 = rb_check_to_array(ary2);
5689 return vm_concat_array(ary1, ary2st);
5693rb_vm_concat_to_array(VALUE ary1, VALUE ary2st)
5695 return vm_concat_to_array(ary1, ary2st);
5704 VALUE tmp = rb_check_to_array(ary);
5708 else if (RTEST(flag)) {
5721 return vm_splat_array(flag, ary);
5727 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
5729 if (flag & VM_CHECKMATCH_ARRAY) {
5733 for (i = 0; i < n; i++) {
5735 VALUE c = check_match(ec, v, target, type);
5744 return check_match(ec, pattern, target, type);
5751 return vm_check_match(ec, target, pattern, flag);
5755vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
5757 const VALUE kw_bits = *(ep - bits);
5760 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
5761 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
5774 if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
5775 RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
5776 RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
5777 RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {
5781 RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
5784 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
5787 RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
5790 RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
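/* Class/module definition helpers for the defineclass instruction follow:
 * constant lookup under cbase, redefinition consistency checks, and the
 * declare/define pair for classes and modules. */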
5797vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
5802 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
5803 return rb_public_const_get_at(cbase, id);
5811vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
5816 else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
5821 "superclass mismatch for class %"PRIsVALUE"",
5834vm_check_if_module(ID id, VALUE mod)
5853vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5856 VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
5864vm_declare_module(ID id, VALUE cbase)
5870NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
5874 VALUE name = rb_id2str(id);
5875 VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
5877 VALUE location = rb_const_source_location_at(cbase, id);
5878 if (!NIL_P(location)) {
5879 rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
5880 " previous definition of %"PRIsVALUE" was here",
5887vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
5891 if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
5893 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
5897 vm_check_if_namespace(cbase);
5902 if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
5903 if (!vm_check_if_class(id, flags, super, klass))
5904 unmatched_redefinition("class", cbase, id, klass);
5908 return vm_declare_class(id, flags, cbase, super);
5913vm_define_module(ID id, rb_num_t flags, VALUE cbase)
5917 vm_check_if_namespace(cbase);
5918 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
5919 if (!vm_check_if_module(id, mod))
5920 unmatched_redefinition("module", cbase, id, mod);
5924 return vm_declare_module(id, cbase);
5929vm_find_or_create_class_by_id(ID id,
5934 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
5937 case VM_DEFINECLASS_TYPE_CLASS:
5939 return vm_define_class(id, flags, cbase, super);
5941 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
5945 case VM_DEFINECLASS_TYPE_MODULE:
5947 return vm_define_module(id, flags, cbase);
5950 rb_bug("unknown defineclass type: %d", (int)type);
5954static rb_method_visibility_t
5959 if (!vm_env_cref_by_cref(cfp->ep)) {
5960 return METHOD_VISI_PUBLIC;
5963 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
5972 if (!vm_env_cref_by_cref(cfp->ep)) {
5976 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
5984 rb_method_visibility_t visi;
5989 visi = METHOD_VISI_PUBLIC;
5992 klass = CREF_CLASS_FOR_DEFINITION(cref);
5993 visi = vm_scope_visibility_get(ec);
6000 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
6003 RCLASS_SET_MAX_IV_COUNT(klass, rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval));
6006 if (!is_singleton && vm_scope_module_func_check(ec)) {
6008 rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
6018 VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());
6020 if (block_handler == VM_BLOCK_HANDLER_NONE) {
6021 rb_vm_localjump_error("no block given (yield)", Qnil, 0);
6024 return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
6028enum method_explorer_type {
6030 mexp_search_invokeblock,
6039 VALUE block_handler,
6040 enum method_explorer_type method_explorer
6045 int argc = vm_ci_argc(ci);
6046 VALUE recv = TOPN(argc);
6048 .block_handler = block_handler,
6049 .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
6055 switch (method_explorer) {
6056 case mexp_search_method:
6057 calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
6058 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6060 case mexp_search_super:
6061 calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
6062 val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
6064 case mexp_search_invokeblock:
6065 val = vm_invokeblock_i(ec, GET_CFP(), &calling);
6082 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6083 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, false, &adjusted_cd, &adjusted_ci);
6085 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_method);
6087 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6092 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, false);
6093 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6104 VALUE bh = VM_BLOCK_HANDLER_NONE;
6105 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_method);
6120 if (vm_ci_flag(cd->ci) & VM_CALL_FORWARDING) {
6121 bh = vm_caller_setup_fwd_args(GET_EC(), GET_CFP(), cd, blockiseq, true, &adjusted_cd, &adjusted_ci);
6123 val = vm_sendish(ec, GET_CFP(), &adjusted_cd.cd, bh, mexp_search_super);
6125 if (cd->cc != adjusted_cd.cd.cc && vm_cc_markable(adjusted_cd.cd.cc)) {
6130 bh = vm_caller_setup_arg_block(ec, GET_CFP(), cd->ci, blockiseq, true);
6131 val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_super);
6142 VALUE bh = VM_BLOCK_HANDLER_NONE;
6143 VALUE val = vm_sendish(ec, GET_CFP(), cd, bh, mexp_search_invokeblock);
6171 if (check_method_basic_definition(vm_cc_cme(cc))) {
6180 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
6186 val = rb_mod_to_s(recv);
6192 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
6193 return rb_nil_to_s(recv);
6197 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
6198 return rb_true_to_s(recv);
6202 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
6203 return rb_false_to_s(recv);
6207 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
6208 return rb_fix_to_s(recv);
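/* opt_ary_freeze / opt_hash_freeze / opt_str_freeze return the frozen
 * literal directly as long as the corresponding #freeze has not been
 * redefined; otherwise the caller falls back to a normal method call. */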
6216vm_opt_ary_freeze(VALUE ary, int bop, ID id)
6218 if (BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
6227vm_opt_hash_freeze(VALUE hash, int bop, ID id)
6229 if (BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
6238vm_opt_str_freeze(VALUE str, int bop, ID id)
6240 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
6254 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6258 VALUE args[1] = {target};
6261 RUBY_DTRACE_CREATE_HOOK(ARRAY, RARRAY_LEN(ary));
6264 return rb_vm_call_with_refinements(ec, dupary, idIncludeP, 1, args, RB_NO_KEYWORDS);
6271 return vm_opt_duparray_include_p(ec, ary, target);
6277 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
6282 VALUE result = *ptr;
6283 rb_snum_t i = num - 1;
6285 const VALUE v = *++ptr;
6286 if (OPTIMIZED_CMP(v, result) > 0) {
6301 return vm_opt_newarray_max(ec, num, ptr);
6307 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
6312 VALUE result = *ptr;
6313 rb_snum_t i = num - 1;
6315 const VALUE v = *++ptr;
6316 if (OPTIMIZED_CMP(v, result) < 0) {
6331 return vm_opt_newarray_min(ec, num, ptr);
6338 if (BASIC_OP_UNREDEFINED_P(BOP_HASH, ARRAY_REDEFINED_OP_FLAG)) {
6339 return rb_ary_hash_values(num, ptr);
6349 return vm_opt_newarray_hash(ec, num, ptr);
6358 if (BASIC_OP_UNREDEFINED_P(BOP_INCLUDE_P, ARRAY_REDEFINED_OP_FLAG)) {
6360 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6364 VALUE args[1] = {target};
6372 return vm_opt_newarray_include_p(ec, num, ptr, target);
6378 if (BASIC_OP_UNREDEFINED_P(BOP_PACK, ARRAY_REDEFINED_OP_FLAG)) {
6380 VALUE ary = rb_setup_fake_ary(&fake_ary, ptr, num);
6381 return rb_ec_pack_ary(ec, ary, fmt, (UNDEF_P(buffer) ? Qnil : buffer));
6391 if (!UNDEF_P(buffer)) {
6392 args[1] = rb_hash_new_with_size(1);
6393 rb_hash_aset(args[1], ID2SYM(idBuffer), buffer);
6398 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idPack, argc, args, kw_splat);
6405 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, buffer);
6411 return vm_opt_newarray_pack_buffer(ec, num, ptr, fmt, Qundef);
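/* Inline constant caches: vm_track_constant_cache registers an IC in the
 * VM's per-ID table so that redefining a constant can invalidate every
 * cache that depends on that ID. */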
6417vm_track_constant_cache(ID id, void *ic)
6420 struct rb_id_table *const_cache = vm->constant_cache;
6421 VALUE lookup_result;
6424 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
6428 ics = set_init_numtable();
6429 rb_id_table_insert(const_cache, id, (VALUE)ics);
6444 vm->inserting_constant_cache_id = id;
6446 set_insert(ics, (st_data_t)ic);
6448 vm->inserting_constant_cache_id = (ID)0;
6455 for (int i = 0; segments[i]; i++) {
6456 ID id = segments[i];
6457 if (id == idNULL) continue;
6458 vm_track_constant_cache(id, ic);
6467 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
6468 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
6470 return (ic_cref == NULL ||
6471 ic_cref == vm_get_cref(reg_ep));
6479 VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
6480 return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
6485rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
6487 return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
6493 if (ruby_vm_const_missing_count > 0) {
6494 ruby_vm_const_missing_count = 0;
6501 ice->ic_cref = vm_get_const_key_cref(reg_ep);
6506 unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
6507 rb_yjit_constant_ic_update(iseq, ic, pos);
6516 if (ice && vm_ic_hit_p(ice, GET_EP())) {
6519 VM_ASSERT(val == vm_get_ev_const_chain(ec, segments));
6522 ruby_vm_constant_cache_misses++;
6523 val = vm_get_ev_const_chain(ec, segments);
6524 vm_ic_track_const_chain(GET_CFP(), ic, segments);
6527 vm_ic_update(GET_ISEQ(), ic, val, GET_EP(), GET_PC() - 2);
6539 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
6540 return is->once.value;
6542 else if (is->once.running_thread == NULL) {
6544 is->once.running_thread = th;
6548 is->once.running_thread = RUNNING_THREAD_ONCE_DONE;
6551 else if (is->once.running_thread == th) {
6553 return vm_once_exec((VALUE)iseq);
6557 RUBY_VM_CHECK_INTS(ec);
6564vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
6566 switch (OBJ_BUILTIN_TYPE(key)) {
6572 if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
6573 SYMBOL_REDEFINED_OP_FLAG |
6574 INTEGER_REDEFINED_OP_FLAG |
6575 FLOAT_REDEFINED_OP_FLAG |
6576 NIL_REDEFINED_OP_FLAG |
6577 TRUE_REDEFINED_OP_FLAG |
6578 FALSE_REDEFINED_OP_FLAG |
6579 STRING_REDEFINED_OP_FLAG)) {
6583 if (!isinf(kval) && modf(kval, &kval) == 0.0) {
6587 if (rb_hash_stlike_lookup(hash, key, &val)) {
6607 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
6608 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
6609 static const char stack_consistency_error[] =
6610 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
6611#if defined RUBY_DEVEL
6612 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
6617 rb_bug(stack_consistency_error, nsp, nbp);
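/* Arithmetic/comparison fastpaths follow: each vm_opt_* helper handles the
 * fixnum/flonum/float (and where relevant string/array/hash) cases inline
 * while the basic operation is unredefined, returning Qundef to signal that
 * the interpreter should fall back to a full method call. */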
6624 if (FIXNUM_2_P(recv, obj) &&
6625 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
6626 return rb_fix_plus_fix(recv, obj);
6628 else if (FLONUM_2_P(recv, obj) &&
6629 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6637 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
6642 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
6643 return rb_str_opt_plus(recv, obj);
6647 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
6658 if (FIXNUM_2_P(recv, obj) &&
6659 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
6660 return rb_fix_minus_fix(recv, obj);
6662 else if (FLONUM_2_P(recv, obj) &&
6663 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6671 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
6682 if (FIXNUM_2_P(recv, obj) &&
6683 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
6684 return rb_fix_mul_fix(recv, obj);
6686 else if (FLONUM_2_P(recv, obj) &&
6687 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6695 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
6706 if (FIXNUM_2_P(recv, obj) &&
6707 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
6708 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
6710 else if (FLONUM_2_P(recv, obj) &&
6711 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6712 return rb_flo_div_flo(recv, obj);
6719 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
6720 return rb_flo_div_flo(recv, obj);
6730 if (FIXNUM_2_P(recv, obj) &&
6731 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
6732 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
6734 else if (FLONUM_2_P(recv, obj) &&
6735 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6743 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
6754 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
6755 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
6757 if (!UNDEF_P(val)) {
6758 return RBOOL(!RTEST(val));
6768 if (FIXNUM_2_P(recv, obj) &&
6769 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
6772 else if (FLONUM_2_P(recv, obj) &&
6773 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6781 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
6793 if (FIXNUM_2_P(recv, obj) &&
6794 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
6797 else if (FLONUM_2_P(recv, obj) &&
6798 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6806 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
6818 if (FIXNUM_2_P(recv, obj) &&
6819 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
6822 else if (FLONUM_2_P(recv, obj) &&
6823 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6831 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
6843 if (FIXNUM_2_P(recv, obj) &&
6844 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
6847 else if (FLONUM_2_P(recv, obj) &&
6848 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6856 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
6873 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
6882 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
6900 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
6911 if (FIXNUM_2_P(recv, obj) &&
6912 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
6924 if (FIXNUM_2_P(recv, obj) &&
6925 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
6926 return rb_fix_aref(recv, obj);
6931 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
6933 return rb_ary_entry_internal(recv, FIX2LONG(obj));
6936 return rb_ary_aref1(recv, obj);
6940 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
6941 return rb_hash_aref(recv, obj);
6955 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
6961 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
6962 rb_hash_aset(recv, obj, set);
6974 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
6975 rb_hash_compare_by_id_p(recv) == Qfalse &&
6976 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
6977 return rb_hash_aref(recv, key);
6987 return vm_opt_aref_with(recv, key);
6994 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
6995 rb_hash_compare_by_id_p(recv) == Qfalse) {
6996 return rb_hash_aset(recv, key, val);
7006 return vm_opt_aset_with(recv, key, value);
7010vm_opt_length(VALUE recv, int bop)
7016 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
7017 if (bop == BOP_EMPTY_P) {
7018 return LONG2NUM(RSTRING_LEN(recv));
7025 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
7029 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
7038vm_opt_empty_p(VALUE recv)
7040 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
7053 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
7056 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
7072 case RSHIFT(~0UL, 1):
7075 return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
7093vm_opt_succ(VALUE recv)
7096 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
7097 return fix_succ(recv);
7103 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
7114 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
7115 return RBOOL(!RTEST(recv));
7130 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
7134 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
7152 VALUE self = GET_SELF();
7154 VM_ASSERT(rb_popcount64((uint64_t)event) == 1);
7156 if (event & global_hooks->events) {
7159 vm_dtrace(event, ec);
7160 rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
7166 if (local_hooks != NULL) {
7167 if (event & local_hooks->events) {
7170 rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
    VM_ASSERT(VM_FRAME_RUBYFRAME_P(cfp));
    VM_ASSERT(ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_RESCUE);
    return cfp->ep[VM_ENV_INDEX_LAST_LVAR];
    const VALUE *pc = reg_cfp->pc;
    rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
    if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
    size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
    rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
    rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
    const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
    enabled_flags |= iseq_local_events;
    VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
    if (bmethod_frame) {
        VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
        bmethod_local_hooks = me->def->body.bmethod.hooks;
        bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
        if (bmethod_local_hooks) {
            bmethod_local_events = bmethod_local_hooks->events;
    if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
        rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
    else if (ec->trace_arg != NULL) {
        rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
        ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
                          RSTRING_PTR(rb_iseq_path(iseq)),
                          (int)rb_iseq_line_no(iseq, pos),
                          RSTRING_PTR(rb_iseq_label(iseq)));
    VM_ASSERT(reg_cfp->pc == pc);
    VM_ASSERT(pc_events != 0);
    VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
    VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
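/*
 * Editorial note: vm_trace is the per-instruction tracing gate.  It
 * returns immediately when no global or targeted events are enabled;
 * otherwise it computes pc_events (the events compiled in at this
 * program counter), masks them against the enabled set, and fires each
 * event class in turn through VM_TRACE_HOOK.  Coverage reuses the same
 * machinery: RUBY_EVENT_COVERAGE_LINE/BRANCH are dispatched with Qundef
 * as the return value, since there is nothing meaningful to report.
 */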
#if VM_CHECK_MODE > 0
NORETURN( NOINLINE( COLDFUNC void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
Init_vm_stack_canary(void)
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01;
    vm_stack_canary_was_born = true;
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
    const char *insn = rb_insns_name(i);
    rb_bug("dead canary found at %s: %s", insn, str);
void Init_vm_stack_canary(void) { }
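/*
 * Editorial note: the stack canary is a random word written just past
 * the VM stack top before executing an instruction declared "leaf", and
 * checked afterwards (see SETUP_CANARY/CHECK_CANARY below).  A clobbered
 * canary means the instruction pushed to the stack despite its leaf
 * declaration, and rb_vm_canary_is_found_dead reports the offending
 * instruction via rb_bug.  The `|= 0x01` presumably serves two purposes:
 * the canary can never be zero, and a set low bit tags the word as a
 * Fixnum immediate, so a scan of the stack will never mistake it for a
 * heap pointer.
 */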
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
static builtin_invoker
lookup_builtin_invoker(int argc)
    static const builtin_invoker invokers[] = {
    return invokers[argc];
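/*
 * Editorial note: builtin dispatch is a fixed-arity trampoline table.
 * Each fixed-arity invoker above casts the stored function pointer to
 * the exact N-argument prototype before calling it, so the call is made
 * with a correct C ABI signature (calling a function pointer through an
 * incompatible type would be undefined behavior), and
 * lookup_builtin_invoker simply indexes the table by argc.  A minimal
 * sketch of the shape, with hypothetical names:
 *
 *     typedef VALUE (*invoker_t)(rb_execution_context_t *, VALUE,
 *                                const VALUE *, rb_insn_func_t);
 *     static const invoker_t table[] = {
 *         builtin_invoker0, builtin_invoker1, // ... one per arity
 *     };
 *     return table[argc];  // argc is bounded by the builtin's arity
 */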
    const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_attrs & BUILTIN_ATTR_LEAF;
    SETUP_CANARY(canary_p);
    rb_insn_func_t func_ptr = (rb_insn_func_t)(uintptr_t)bf->func_ptr;
    VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, func_ptr);
    CHECK_CANARY(canary_p, BIN(invokebuiltin));
    return invoke_bf(ec, cfp, bf, argv);
        fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
        for (int i=0; i<bf->argc; i++) {
            ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
        ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc,
                          (void *)(uintptr_t)bf->func_ptr);
    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);
    const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
    return invoke_bf(ec, cfp, bf, argv);
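/*
 * Editorial note on the argv arithmetic above: a frame's locals sit
 * directly below its ep, followed by VM_ENV_DATA_SIZE bookkeeping slots
 * (ep points at the last of them).  The first local therefore lives at
 *
 *     ep - local_table_size - VM_ENV_DATA_SIZE + 1
 *
 * and adding start_index skips the leading locals that are not passed
 * through.  Worked example, assuming VM_ENV_DATA_SIZE == 3 and a method
 * with 3 locals: locals occupy ep[-5..-3], env data ep[-2..0], and with
 * start_index == 0 the delegate hands the builtin argv == ep - 5 -- the
 * locals themselves, with no copying.
 */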
    return cfp->ep[index];